24 changes: 12 additions & 12 deletions flake.lock

Some generated files are not rendered by default.

138 changes: 138 additions & 0 deletions src/deepwork/core/adapters.py
@@ -628,3 +628,141 @@ def sync_hooks(self, project_path: Path, hooks: dict[str, list[dict[str, Any]]])
# Gemini CLI does not support skill-level hooks
# Hooks are configured globally in settings.json, not per-skill
return 0


class CodexAdapter(AgentAdapter):
"""Adapter for OpenAI Codex CLI.

Codex CLI uses SKILL.md files in .codex/skills/ folders (same naming as Claude).
MCP servers are registered in .codex/config.toml under [mcp_servers.<name>].

Codex does NOT support skill-level hooks, so hook_name_mapping is empty.
Codex uses the self-review quality gate path (external_runner=None) since
there is no Codex CLI subprocess for running reviews.
"""

name = "codex"
display_name = "Codex CLI"
config_dir = ".codex"

# Codex CLI does NOT support skill-level hooks
hook_name_mapping: ClassVar[dict[SkillLifecycleHook, str]] = {}

def sync_hooks(self, project_path: Path, hooks: dict[str, list[dict[str, Any]]]) -> int:
"""
Sync hooks to Codex CLI settings.

Codex CLI does not support skill-level hooks. This method is a no-op.

Args:
project_path: Path to project root
hooks: Dict mapping lifecycle events to hook configurations (ignored)

Returns:
0 (Codex does not support skill-level hooks)
"""
return 0

def register_mcp_server(self, project_path: Path) -> bool:
"""
Register the DeepWork MCP server in .codex/config.toml.

Codex CLI reads MCP server configurations from config.toml using
[mcp_servers.<name>] TOML tables. No --external-runner flag is passed,
so the server uses self-review mode for quality gates.

Args:
project_path: Path to project root

Returns:
True if server was registered or updated, False if no changes needed

Raises:
AdapterError: If registration fails
"""
try:
import tomllib
except ModuleNotFoundError:
import tomli as tomllib # type: ignore[no-redef]

config_file = project_path / self.config_dir / "config.toml"

# Load existing config.toml or start fresh
existing_config: dict[str, Any] = {}
if config_file.exists():
try:
with open(config_file, "rb") as f:
existing_config = tomllib.load(f)
except Exception as e:
raise AdapterError(f"Failed to read .codex/config.toml: {e}") from e

# Build the expected MCP server config
# No --external-runner flag: quality gates use self-review mode
new_server_config = {
"command": "deepwork",
"args": ["serve", "--path", "."],
}

# Check if already registered with same config
mcp_servers = existing_config.get("mcp_servers", {})
existing_server = mcp_servers.get("deepwork", {})
if (
existing_server.get("command") == new_server_config["command"]
and existing_server.get("args") == new_server_config["args"]
):
return False

# Write config.toml with the MCP server entry
# We use a targeted approach: read existing content and append/update
# the [mcp_servers.deepwork] section
self._write_mcp_server_to_config(config_file, new_server_config)

return True

def _write_mcp_server_to_config(
self, config_file: Path, server_config: dict[str, Any]
) -> None:
"""
Write or update the [mcp_servers.deepwork] section in config.toml.

Args:
config_file: Path to .codex/config.toml
server_config: Server configuration dict with command and args

Raises:
AdapterError: If write fails
"""
import re

config_file.parent.mkdir(parents=True, exist_ok=True)

# Read existing content
existing_content = ""
if config_file.exists():
try:
existing_content = config_file.read_text(encoding="utf-8")
except OSError as e:
raise AdapterError(f"Failed to read {config_file}: {e}") from e

# Build the TOML section
args_toml = ", ".join(f'"{a}"' for a in server_config["args"])
new_section = (
f'[mcp_servers.deepwork]\n'
f'command = "{server_config["command"]}"\n'
f'args = [{args_toml}]\n'
)

# Replace existing section or append
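# Match the [mcp_servers.deepwork] header plus everything up to the next
# table header or the end of the file; re.DOTALL lets `.` span newlines,
# including the brackets inside an existing args array.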
pattern = r'\[mcp_servers\.deepwork\]\n.*?(?=\n\[|\Z)'
if re.search(pattern, existing_content, re.DOTALL):
updated_content = re.sub(pattern, new_section.rstrip(), existing_content, flags=re.DOTALL)
else:
# Append to end
separator = "\n" if existing_content and not existing_content.endswith("\n") else ""
extra_newline = "\n" if existing_content.strip() else ""
updated_content = existing_content + separator + extra_newline + new_section

try:
config_file.write_text(updated_content, encoding="utf-8")
except OSError as e:
raise AdapterError(f"Failed to write {config_file}: {e}") from e
37 changes: 37 additions & 0 deletions src/deepwork/templates/codex/skill-deepwork.md.jinja
@@ -0,0 +1,37 @@
{#
Template: skill-deepwork.md.jinja
Purpose: Generates the main /deepwork skill that instructs agents to use MCP tools

This template is used to create the entry-point skill for DeepWork.
Instead of containing step instructions, it directs agents to use the
DeepWork MCP server tools.
#}
---
name: deepwork
description: "Start or continue DeepWork workflows using MCP tools"
---

# DeepWork Workflow Manager

Execute multi-step workflows with quality gate checkpoints.

> **IMPORTANT**: Use the DeepWork MCP server tools. All workflow operations
> are performed by calling MCP tools and following the instructions they return,
> not by reading instructions from files.

## How to Use

1. Call `get_workflows` to discover available workflows
2. Call `start_workflow` with goal, job_name, and workflow_name
3. Follow the step instructions returned
4. Call `finished_step` with your outputs when done
5. Handle the response: `needs_work`, `next_step`, or `workflow_complete`

## Intent Parsing

When the user invokes `/deepwork`, parse their intent:
1. **ALWAYS**: Call `get_workflows` to discover available workflows
2. Based on the available workflows and what the user said in their request, proceed:
- **Explicit workflow**: `/deepwork <workflow name>` → start that workflow
- **General request**: `/deepwork <a request>` → infer best match from available workflows
- **No context**: `/deepwork` alone → ask user to choose from available workflows
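To illustrate the loop this skill describes, here is a hedged sketch in pseudo-client form. `call_tool` is a hypothetical stand-in for however the agent runtime issues MCP tool calls, and the payload and response shapes (including the `status` and `outputs` fields) are assumptions beyond the tool and result names listed above:

```python
from typing import Any, Callable


def run_deepwork(
    call_tool: Callable[[str, dict[str, Any]], dict[str, Any]],
    goal: str,
    job_name: str,
    workflow_name: str,
) -> None:
    """Drive a DeepWork workflow through the MCP tools described above."""
    # 1. Discover available workflows.
    available = call_tool("get_workflows", {})  # informs workflow selection

    # 2. Start the chosen workflow.
    step = call_tool(
        "start_workflow",
        {"goal": goal, "job_name": job_name, "workflow_name": workflow_name},
    )

    # 3-5. Follow each step's instructions, report outputs, handle the response.
    while True:
        # The agent performs the work described in `step` here; this outputs
        # payload is illustrative only.
        outputs = {"summary": "work produced for this step"}
        result = call_tool("finished_step", {"outputs": outputs})

        status = result.get("status")  # field name assumed; see note above
        if status == "needs_work":
            continue                   # revise and resubmit the same step
        if status == "next_step":
            step = result              # proceed to the next step's instructions
            continue
        if status == "workflow_complete":
            break
```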
8 changes: 8 additions & 0 deletions tests/conftest.py
@@ -47,6 +47,14 @@ def mock_multi_platform_project(mock_git_repo: Path) -> Path:
return mock_git_repo


@pytest.fixture
def mock_codex_project(mock_git_repo: Path) -> Path:
"""Create a mock project with Codex CLI setup."""
codex_dir = mock_git_repo / ".codex"
codex_dir.mkdir(exist_ok=True)
return mock_git_repo


@pytest.fixture
def fixtures_dir() -> Path:
"""Return the path to the fixtures directory."""
45 changes: 45 additions & 0 deletions tests/integration/test_install_flow.py
@@ -128,6 +128,51 @@ def test_install_with_multiple_platforms_auto_detect(
# so we don't assert on Gemini skill existence - the install will show
# an error for Gemini skill generation but continue

def test_install_with_codex(self, mock_codex_project: Path) -> None:
"""Test installing DeepWork in a Codex CLI project."""
runner = CliRunner()

result = runner.invoke(
cli,
["install", "--platform", "codex", "--path", str(mock_codex_project)],
catch_exceptions=False,
)

assert result.exit_code == 0
assert "DeepWork Installation" in result.output
assert "Git repository found" in result.output
assert "Codex CLI detected" in result.output
assert "DeepWork installed successfully" in result.output

# Verify directory structure
deepwork_dir = mock_codex_project / ".deepwork"
assert deepwork_dir.exists()
assert (deepwork_dir / "jobs").exists()

# Verify config.yml
config_file = deepwork_dir / "config.yml"
assert config_file.exists()
config = load_yaml(config_file)
assert config is not None
assert "codex" in config["platforms"]

# Verify MCP entry point skill was created (deepwork/SKILL.md)
codex_dir = mock_codex_project / ".codex" / "skills"
assert (codex_dir / "deepwork" / "SKILL.md").exists()

# Verify deepwork skill content references MCP tools
deepwork_skill = (codex_dir / "deepwork" / "SKILL.md").read_text()
assert "deepwork" in deepwork_skill.lower()

# Verify MCP server registered in .codex/config.toml
config_toml = mock_codex_project / ".codex" / "config.toml"
assert config_toml.exists()
toml_content = config_toml.read_text()
assert "[mcp_servers.deepwork]" in toml_content
assert 'command = "deepwork"' in toml_content
# No --external-runner flag for Codex (uses self-review mode)
assert "external-runner" not in toml_content

def test_install_with_specified_platform_when_missing(self, mock_git_repo: Path) -> None:
"""Test that install fails when specified platform is not present."""
runner = CliRunner()