Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion src/adapter/cli-to-openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,11 @@ export function cliResultToOpenai(
/**
* Normalize Claude model names to a consistent format
* e.g., "claude-sonnet-4-5-20250929" -> "claude-sonnet-4"
*
* Handles undefined model (e.g., when rate limit is hit and modelUsage is empty)
*/
function normalizeModelName(model: string): string {
function normalizeModelName(model: string | undefined): string {
if (!model) return "claude-sonnet-4";
if (model.includes("opus")) return "claude-opus-4";
if (model.includes("sonnet")) return "claude-sonnet-4";
if (model.includes("haiku")) return "claude-haiku-4";
Expand Down
87 changes: 76 additions & 11 deletions src/adapter/openai-to-cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,21 +10,32 @@ export interface CliInput {
prompt: string;
model: ClaudeModel;
sessionId?: string;
systemPrompt?: string;
tools?: string[];
}

// Canonical full model names and the CLI alias each one resolves to.
const CANONICAL_MODELS: ReadonlyArray<readonly [string, ClaudeModel]> = [
  ["claude-opus-4", "opus"],
  ["claude-opus-4-6", "opus"], // Note: CLI doesn't have a separate 4.6 model, uses regular opus (200k limit)
  ["claude-sonnet-4", "sonnet"],
  ["claude-sonnet-4-5", "sonnet"],
  ["claude-haiku-4", "haiku"],
];

/**
 * Lookup table from incoming OpenAI-style model identifiers to Claude CLI
 * aliases. Covers bare names, provider-prefixed names, and short aliases.
 */
const MODEL_MAP: Record<string, ClaudeModel> = {
  // Direct model names
  ...Object.fromEntries(CANONICAL_MODELS),
  // With provider prefix
  ...Object.fromEntries(CANONICAL_MODELS.map(([id, alias]) => [`claude-code-cli/${id}`, alias])),
  // Claude-max prefix (from Clawdbot config)
  "claude-max/claude-opus-4-6": "opus",
  "claude-max/claude-sonnet-4-5": "sonnet",
  // Aliases
  opus: "opus",
  sonnet: "sonnet",
  haiku: "haiku",
  "opus-max": "opus",
  "sonnet-max": "sonnet",
};

/**
Expand All @@ -47,43 +58,97 @@ export function extractModel(model: string): ClaudeModel {
}

/**
 * Extract text content from a message content field.
 * Handles string content, arrays of content parts (multimodal), and
 * nullish content (e.g. assistant messages that carry only tool calls —
 * OpenAI allows `content: null` there).
 *
 * Image parts are intentionally dropped: only "text" parts contribute.
 *
 * @param content - OpenAI message content: a plain string or content-part array.
 * @returns All text parts joined with newlines; "" for null/undefined.
 */
function extractTextContent(
  content: string | Array<{ type: string; text?: string; image_url?: { url: string } }> | null | undefined,
): string {
  // Previously null fell through to String(content) and injected the literal
  // text "null" into the prompt; treat nullish content as empty instead.
  if (content == null) return "";

  // If it's already a string, return it directly
  if (typeof content === "string") {
    return content;
  }

  // If it's an array of content parts, extract text from each text part
  if (Array.isArray(content)) {
    return content
      .filter((part): part is { type: "text"; text: string } => part.type === "text" && typeof part.text === "string")
      .map((part) => part.text)
      .join("\n");
  }

  // Fallback for unexpected types
  return String(content);
}

/**
* Extract system messages and conversation from OpenAI messages array
*
* Claude Code CLI in --print mode expects a single prompt, not a conversation.
* We format the messages into a readable format that preserves context.
* System messages should be passed via --append-system-prompt flag,
* not embedded in the user prompt (more reliable for OpenClaw integration).
*/
export function messagesToPrompt(messages: OpenAIChatRequest["messages"]): string {
const parts: string[] = [];
export function extractMessagesContent(messages: OpenAIChatRequest["messages"]): {
systemPrompt: string | undefined;
conversationPrompt: string;
} {
const systemParts: string[] = [];
const conversationParts: string[] = [];

for (const msg of messages) {
const text = extractTextContent(msg.content);

switch (msg.role) {
case "system":
// System messages become context instructions
parts.push(`<system>\n${msg.content}\n</system>\n`);
case "developer":
// System/developer messages go to --append-system-prompt flag
// "developer" is OpenAI's newer role for system-level instructions
systemParts.push(text);
break;

case "user":
// User messages are the main prompt
parts.push(msg.content);
conversationParts.push(text);
break;

case "assistant":
// Previous assistant responses for context
parts.push(`<previous_response>\n${msg.content}\n</previous_response>\n`);
conversationParts.push(`<previous_response>\n${text}\n</previous_response>\n`);
break;
}
}

return parts.join("\n").trim();
return {
systemPrompt: systemParts.length > 0 ? systemParts.join("\n\n").trim() : undefined,
conversationPrompt: conversationParts.join("\n").trim(),
};
}

/**
 * Flatten an OpenAI messages array into one prompt string for the Claude CLI.
 * System/developer content, when present, is wrapped in <system> tags ahead
 * of the conversation text.
 *
 * @deprecated Use extractMessagesContent instead for better system prompt handling
 */
export function messagesToPrompt(messages: OpenAIChatRequest["messages"]): string {
  const { systemPrompt, conversationPrompt } = extractMessagesContent(messages);
  return systemPrompt
    ? `<system>\n${systemPrompt}\n</system>\n\n${conversationPrompt}`
    : conversationPrompt;
}

/**
* Convert OpenAI chat request to CLI input format
*/
export function openaiToCli(request: OpenAIChatRequest): CliInput {
const { systemPrompt, conversationPrompt } = extractMessagesContent(request.messages);

return {
prompt: messagesToPrompt(request.messages),
prompt: conversationPrompt,
model: extractModel(request.model),
sessionId: request.user, // Use OpenAI's user field for session mapping
systemPrompt,
// TODO: Extract tool names from request.tools and map to Claude Code tool names
// For now, let Claude Code use all its builtin tools
tools: undefined,
};
}
24 changes: 22 additions & 2 deletions src/server/routes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -175,10 +175,14 @@ async function handleStreamingResponse(
resolve();
});

// Start the subprocess
// Start the subprocess with OpenClaw workspace as cwd
const workspacePath = process.env.OPENCLAW_WORKSPACE || process.env.CLAWDBOT_WORKSPACE;
subprocess.start(cliInput.prompt, {
model: cliInput.model,
sessionId: cliInput.sessionId,
systemPrompt: cliInput.systemPrompt,
tools: cliInput.tools,
cwd: workspacePath,
}).catch((err) => {
console.error("[Streaming] Subprocess start error:", err);
reject(err);
Expand Down Expand Up @@ -229,11 +233,15 @@ async function handleNonStreamingResponse(
resolve();
});

// Start the subprocess
// Start the subprocess with OpenClaw workspace as cwd
const workspacePath = process.env.OPENCLAW_WORKSPACE || process.env.CLAWDBOT_WORKSPACE;
subprocess
.start(cliInput.prompt, {
model: cliInput.model,
sessionId: cliInput.sessionId,
systemPrompt: cliInput.systemPrompt,
tools: cliInput.tools,
cwd: workspacePath,
})
.catch((error) => {
res.status(500).json({
Expand All @@ -257,12 +265,24 @@ export function handleModels(_req: Request, res: Response): void {
res.json({
object: "list",
data: [
{
id: "claude-opus-4-6",
object: "model",
owned_by: "anthropic",
created: Math.floor(Date.now() / 1000),
},
{
id: "claude-opus-4",
object: "model",
owned_by: "anthropic",
created: Math.floor(Date.now() / 1000),
},
{
id: "claude-sonnet-4-5",
object: "model",
owned_by: "anthropic",
created: Math.floor(Date.now() / 1000),
},
{
id: "claude-sonnet-4",
object: "model",
Expand Down
69 changes: 62 additions & 7 deletions src/subprocess/manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ import type { ClaudeModel } from "../adapter/openai-to-cli.js";
/** Options for a single Claude CLI subprocess run. */
export interface SubprocessOptions {
  /** CLI model alias (opus/sonnet/haiku); forwarded as `--model`. */
  model: ClaudeModel;
  /** Forwarded as `--session-id` when present. */
  sessionId?: string;
  /** Extra system-level instructions; short ones (<=8000 chars) go to `--append-system-prompt`, longer ones are prepended to the stdin prompt. */
  systemPrompt?: string;
  /** Tool names joined with "," for `--tools` when non-empty. */
  tools?: string[];
  /** Working directory for the child process — presumably forwarded to spawn(); confirm in start(). */
  cwd?: string;
  /** Run timeout in milliseconds; DEFAULT_TIMEOUT is 5 minutes — NOTE(review): enforcement site not visible here, confirm. */
  timeout?: number;
}
Expand All @@ -36,12 +38,24 @@ export interface SubprocessEvents {

const DEFAULT_TIMEOUT = 300000; // 5 minutes

// Debug logging controlled by environment variable
const DEBUG = process.env.DEBUG_SUBPROCESS === "true";

export class ClaudeSubprocess extends EventEmitter {
private process: ChildProcess | null = null;
private buffer: string = "";
private timeoutId: NodeJS.Timeout | null = null;
private isKilled: boolean = false;

/**
* Conditional debug logging
*/
private debug(...args: any[]): void {
if (DEBUG) {
console.error(...args);
}
}

/**
* Start the Claude CLI subprocess with the given prompt
*/
Expand Down Expand Up @@ -81,15 +95,31 @@ export class ClaudeSubprocess extends EventEmitter {
}
});

// Close stdin since we pass prompt as argument
this.process.stdin?.end();
// Write prompt to stdin instead of passing as argument (avoids ENAMETOOLONG on Windows)
// If system prompt is large, prepend it to the prompt instead of using --append-system-prompt
if (this.process.stdin) {
let fullPrompt = prompt;

// If we have a system prompt that wasn't added via CLI args (because it's too long),
// prepend it to the prompt with XML tags
if (options.systemPrompt && options.systemPrompt.length > 8000) {
fullPrompt = `<system>\n${options.systemPrompt}\n</system>\n\n${prompt}`;
this.debug(`[Subprocess] System prompt too long (${options.systemPrompt.length} chars), prepending to stdin instead of CLI arg`);
}

this.debug(`[Subprocess] Writing ${fullPrompt.length} chars to stdin`);
this.debug(`[Subprocess] Prompt preview (first 500 chars):\n${fullPrompt.slice(0, 500)}`);
this.debug(`[Subprocess] Prompt preview (last 500 chars):\n${fullPrompt.slice(-500)}`);
this.process.stdin.write(fullPrompt + "\n");
this.process.stdin.end();
}

console.error(`[Subprocess] Process spawned with PID: ${this.process.pid}`);
this.debug(`[Subprocess] Process spawned with PID: ${this.process.pid}`);

// Parse JSON stream from stdout
this.process.stdout?.on("data", (chunk: Buffer) => {
const data = chunk.toString();
console.error(`[Subprocess] Received ${data.length} bytes of stdout`);
this.debug(`[Subprocess] Received ${data.length} bytes of stdout`);
this.buffer += data;
this.processBuffer();
});
Expand All @@ -100,13 +130,13 @@ export class ClaudeSubprocess extends EventEmitter {
if (errorText) {
// Don't emit as error unless it's actually an error
// Claude CLI may write debug info to stderr
console.error("[Subprocess stderr]:", errorText.slice(0, 200));
this.debug("[Subprocess stderr]:", errorText.slice(0, 200));
}
});

// Handle process close
this.process.on("close", (code) => {
console.error(`[Subprocess] Process closed with code: ${code}`);
this.debug(`[Subprocess] Process closed with code: ${code}`);
this.clearTimeout();
// Process any remaining buffer
if (this.buffer.trim()) {
Expand Down Expand Up @@ -137,13 +167,36 @@ export class ClaudeSubprocess extends EventEmitter {
"--model",
options.model, // Model alias (opus/sonnet/haiku)
"--no-session-persistence", // Don't save sessions
prompt, // Pass prompt as argument (more reliable than stdin)
"--dangerously-skip-permissions", // Allow file operations (running as service)
];

// Add system prompt if provided (backstory/memories from OpenClaw)
// Only use --append-system-prompt for short system prompts to avoid ENAMETOOLONG on Windows
// Long system prompts (>8000 chars) are prepended to stdin instead
if (options.systemPrompt) {
this.debug(`[Subprocess] System prompt: ${options.systemPrompt.length} chars`);
if (options.systemPrompt.length <= 8000) {
this.debug(`[Subprocess] Adding system prompt via --append-system-prompt (short enough for CLI)`);
args.push("--append-system-prompt", options.systemPrompt);
} else {
this.debug(`[Subprocess] System prompt too long for CLI arg, will prepend to stdin`);
}
} else {
this.debug("[Subprocess] NO system prompt provided");
}

// Add tool restrictions if provided
if (options.tools && options.tools.length > 0) {
args.push("--tools", options.tools.join(","));
}

if (options.sessionId) {
args.push("--session-id", options.sessionId);
}

// Prompt is passed via stdin to avoid Windows ENAMETOOLONG error
// (not as CLI argument like in the original PR)

return args;
}

Expand All @@ -166,8 +219,10 @@ export class ClaudeSubprocess extends EventEmitter {
// Emit content delta for streaming
this.emit("content_delta", message as ClaudeCliStreamEvent);
} else if (isAssistantMessage(message)) {
this.debug(`[Response] Assistant message:`, JSON.stringify(message.message.content));
this.emit("assistant", message);
} else if (isResultMessage(message)) {
this.debug(`[Response] Result:`, message.result);
this.emit("result", message);
}
} catch {
Expand Down
19 changes: 17 additions & 2 deletions src/types/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,24 @@
* Used for Clawdbot integration
*/

// Content can be a string or array of content parts (multimodal)

/** A plain-text chunk of a multimodal message. */
export interface OpenAITextContentPart {
  type: "text";
  /** The text payload of this part. */
  text: string;
}

/** An image chunk of a multimodal message, referenced by URL. */
export interface OpenAIImageContentPart {
  type: "image_url";
  image_url: {
    /** Image location — presumably an https URL or data URI per OpenAI convention; confirm upstream. */
    url: string;
  };
}

/** Union of supported content parts, discriminated on `type`. */
export type OpenAIContentPart = OpenAITextContentPart | OpenAIImageContentPart;

export interface OpenAIChatMessage {
role: "system" | "user" | "assistant";
content: string;
role: "system" | "developer" | "user" | "assistant";
content: string | OpenAIContentPart[];
}

export interface OpenAIChatRequest {
Expand Down