Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added .aic/graph.db
Binary file not shown.
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ The philosophy behind Conductor is simple: control your code. By treating contex
- **Iterate safely**: Review plans before code is written, keeping you firmly in the loop.
- **Work as a team**: Set project-level context for your product, tech stack, and workflow preferences that become a shared foundation for your team.
- **Build on existing projects**: Intelligent initialization for both new (Greenfield) and existing (Brownfield) projects.
- **Semantic Awareness (AIC)**: Automatically indexes your codebase into "Rich Skeletons" using the AI Compiler (AIC). This functionality is powered by a local **Model Context Protocol (MCP)** server that exposes tools for semantic indexing and context retrieval (`aic_index`, `aic_get_file_context`) directly to the Gemini agent.
- **Smart revert**: A git-aware revert command that understands logical units of work (tracks, phases, tasks) rather than just commit hashes.

## Installation
Expand Down Expand Up @@ -112,8 +113,21 @@ During implementation, you can also:
| `/conductor:status` | Displays the current progress of the tracks file and active tracks. | Reads `conductor/tracks.md` |
| `/conductor:revert` | Reverts a track, phase, or task by analyzing git history. | Reverts git history |

## Architecture

Conductor leverages the **Model Context Protocol (MCP)** to provide deep, local integration with your codebase.

- **Client**: The Gemini CLI acts as the MCP client.
- **Server**: The `aic` package runs as a local MCP server (`python3 -m aic.server`).
- **Tools**: The server exposes the following tools to the agent:
- `aic_index`: Builds/updates the semantic dependency graph.
- `aic_get_file_context`: Retrieves token-optimized skeletons for files and their dependencies.
- `aic_list_directory`: Provides filesystem visibility.
- `aic_run_shell_command`: Allows safe execution of setup and maintenance commands.

## Resources

- [AI Compiler Patent](https://www.tdcommons.org/dpubs_series/8241/): Semantic Dependency Graph for AI Agents
- [Gemini CLI extensions](https://geminicli.com/docs/extensions/): Documentation about using extensions in Gemini CLI
- [GitHub issues](https://github.com/gemini-cli-extensions/conductor/issues): Report bugs or request features

Expand Down
Empty file added aic/__init__.py
Empty file.
99 changes: 99 additions & 0 deletions aic/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
import argparse
import os
from aic.db import init_db, upsert_node, get_node, get_dependencies, update_edges, mark_dirty
from aic.skeleton import UniversalSkeletonizer
from aic.utils import calculate_hash, resolve_dep_to_path, get_ignore_patterns, should_ignore

def index_repo(root_dir="."):
    """Walk *root_dir* and (re)index every readable text file that changed.

    Prunes ignored directories, skips ignored and binary files, stores a
    skeleton for each new or modified file, marks its dependents dirty,
    and records the file's resolved dependency edges. Prints one line per
    indexed file and a final summary.
    """
    init_db()
    skeletonizer = UniversalSkeletonizer()
    ignore_patterns = get_ignore_patterns(root_dir)

    processed = 0

    for current_dir, subdirs, filenames in os.walk(root_dir):
        # Prune in place so os.walk never descends into ignored directories.
        subdirs[:] = [d for d in subdirs if not should_ignore(d, ignore_patterns)]

        for filename in filenames:
            if should_ignore(filename, ignore_patterns):
                continue

            full_path = os.path.join(current_dir, filename)
            rel_path = os.path.relpath(full_path, root_dir)

            # Strict UTF-8 decoding doubles as a cheap binary-file detector:
            # anything that fails to decode is silently skipped.
            try:
                with open(full_path, 'r', encoding='utf-8', errors='strict') as fh:
                    content = fh.read()
            except UnicodeDecodeError:
                continue
            except Exception as e:
                print(f"Skipping {rel_path}: {e}")
                continue

            current_hash = calculate_hash(content)
            existing = get_node(rel_path)
            if existing and existing['hash'] == current_hash:
                # Content unchanged since the last run; nothing to do.
                continue

            print(f"Indexing: {rel_path}")
            skeleton, dependencies = skeletonizer.skeletonize(content, rel_path)
            upsert_node(rel_path, current_hash, skeleton)
            # Flag files that depend on this one so they can be refreshed.
            mark_dirty(rel_path)

            # Keep only dependencies that resolve to an actual file path.
            resolved_deps = []
            for dep in dependencies:
                target = resolve_dep_to_path(dep, rel_path, root_dir)
                if target:
                    resolved_deps.append(target)

            update_edges(rel_path, resolved_deps)
            processed += 1

    print(f"Finished indexing. Processed {processed} files.")

def get_context(file_path):
    """Return a markdown context report for *file_path*.

    The report contains the file's own skeleton followed by a
    "## Dependencies" section with the skeleton of each indexed
    dependency. Returns an error string if the file is not indexed.
    """
    node = get_node(file_path)
    if node is None:
        return f"# Error: {file_path} not indexed."

    sections = [f"# Context for {file_path}", node['skeleton'], ""]

    dependency_paths = get_dependencies(file_path)
    if dependency_paths:
        sections.append("## Dependencies")
        for dep_path in dependency_paths:
            dep_node = get_node(dep_path)
            if dep_node is None:
                # Edge exists but the target was never indexed; skip it.
                continue
            sections.extend([f"### {dep_path}", dep_node['skeleton'], ""])

    return "\n".join(sections)

def main():
    """CLI entry point: `index` rebuilds the graph, `context FILE` prints a report."""
    parser = argparse.ArgumentParser(description="AIC: AI Compiler")
    subparsers = parser.add_subparsers(dest="command")

    subparsers.add_parser("index", help="Build or update the semantic index")

    context_parser = subparsers.add_parser(
        "context", help="Print the indexed context for a file"
    )
    context_parser.add_argument("file")

    args = parser.parse_args()

    if args.command == "index":
        # index_repo() already prints "Finished indexing. Processed N files.";
        # the extra print here duplicated that message on every run.
        index_repo()
    elif args.command == "context":
        print(get_context(args.file))
    else:
        parser.print_help()

if __name__ == "__main__":
    main()
65 changes: 65 additions & 0 deletions aic/db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import os
import sqlite3
from contextlib import closing

DB_PATH = ".aic/graph.db"

def get_connection():
    """Open a SQLite connection to the graph database.

    Creates the database's parent directory on first use and configures
    the connection to yield sqlite3.Row objects so columns can be read
    by name. The caller is responsible for closing the connection.
    """
    db_dir = os.path.dirname(DB_PATH)
    os.makedirs(db_dir, exist_ok=True)
    connection = sqlite3.connect(DB_PATH)
    connection.row_factory = sqlite3.Row
    return connection

def init_db():
    """Create the nodes and edges tables if they do not already exist.

    `nodes` holds one row per indexed file (path, content hash, skeleton,
    CLEAN/DIRTY status); `edges` records "source depends on target" pairs.
    """
    # closing() fixes a leak: `with conn` only manages the transaction
    # (commit/rollback) and never closes the connection. The inner
    # `with conn` keeps the original commit-on-success behavior.
    with closing(get_connection()) as conn:
        with conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS nodes (
                    path TEXT PRIMARY KEY,
                    hash TEXT,
                    skeleton TEXT,
                    status TEXT DEFAULT 'CLEAN'
                )
            """)
            conn.execute("""
                CREATE TABLE IF NOT EXISTS edges (
                    source TEXT,
                    target TEXT,
                    PRIMARY KEY (source, target),
                    FOREIGN KEY(source) REFERENCES nodes(path)
                )
            """)

def upsert_node(path, hash_val, skeleton):
    """Insert or refresh the node for *path*, resetting its status to CLEAN."""
    # closing() fixes a connection leak: `with conn` alone commits the
    # transaction but never closes the sqlite handle.
    with closing(get_connection()) as conn:
        with conn:
            conn.execute("""
                INSERT INTO nodes (path, hash, skeleton, status)
                VALUES (?, ?, ?, 'CLEAN')
                ON CONFLICT(path) DO UPDATE SET
                    hash = excluded.hash,
                    skeleton = excluded.skeleton,
                    status = 'CLEAN'
            """, (path, hash_val, skeleton))

def mark_dirty(path):
    """Mark all nodes that depend on this path as DIRTY."""
    # closing() fixes a connection leak: `with conn` alone commits the
    # transaction but never closes the sqlite handle.
    with closing(get_connection()) as conn:
        with conn:
            conn.execute("""
                UPDATE nodes
                SET status = 'DIRTY'
                WHERE path IN (
                    SELECT source FROM edges WHERE target = ?
                )
            """, (path,))

def update_edges(source_path, target_paths):
    """Replace the outgoing dependency edges of *source_path*.

    Deletes the previous edges and inserts one row per target (duplicates
    ignored) in a single committed transaction.
    """
    # closing() fixes a connection leak: `with conn` alone commits the
    # transaction but never closes the sqlite handle.
    with closing(get_connection()) as conn:
        with conn:
            conn.execute("DELETE FROM edges WHERE source = ?", (source_path,))
            # executemany replaces the Python-level insert loop.
            conn.executemany(
                "INSERT OR IGNORE INTO edges (source, target) VALUES (?, ?)",
                ((source_path, target) for target in target_paths),
            )

def get_node(path):
    """Return the nodes row for *path* as an sqlite3.Row, or None if not indexed."""
    # closing() fixes a connection leak: the read path never closed its
    # connection at all. The Row is fully materialized before close.
    with closing(get_connection()) as conn:
        return conn.execute("SELECT * FROM nodes WHERE path = ?", (path,)).fetchone()

def get_dependencies(path):
    """Return the list of target paths that *path* depends on (possibly empty)."""
    # closing() fixes a connection leak: the read path never closed its
    # connection at all. The list is built before the connection closes.
    with closing(get_connection()) as conn:
        rows = conn.execute("SELECT target FROM edges WHERE source = ?", (path,)).fetchall()
        return [row['target'] for row in rows]
Loading