diff --git a/src/adapter/openai-to-cli.ts b/src/adapter/openai-to-cli.ts
index c8ecaa1..a285c24 100644
--- a/src/adapter/openai-to-cli.ts
+++ b/src/adapter/openai-to-cli.ts
@@ -46,6 +46,28 @@ export function extractModel(model: string): ClaudeModel {
return "opus";
 }
 
+/**
+ * Normalize OpenAI message `content` to a plain string.
+ *
+ * The chat completions API allows `content` to be a string or an array of
+ * content parts (e.g. [{type: "text", text: "Hello"}]); clients such as
+ * OpenClaw always send the array form, so both shapes must be handled.
+ * Nullish input yields ""; text parts are joined with newlines.
+ */
+function extractContent(content: unknown): string {
+  if (typeof content === "string") return content;
+  if (Array.isArray(content)) {
+    return content
+      .map(part => {
+        if (typeof part === "string") return part;
+        return part && part.type === "text" && typeof part.text === "string" ? part.text : "";
+      })
+      .filter(Boolean)
+      .join("\n");
+  }
+  return String(content ?? "");
+}
+
/**
* Convert OpenAI messages array to a single prompt string for Claude CLI
*
@@ -59,17 +81,17 @@ export function messagesToPrompt(messages: OpenAIChatRequest["messages"]): strin
switch (msg.role) {
case "system":
// System messages become context instructions
- parts.push(`\n${msg.content}\n\n`);
+ parts.push(`\n${extractContent(msg.content)}\n\n`);
break;
case "user":
// User messages are the main prompt
- parts.push(msg.content);
+ parts.push(extractContent(msg.content));
break;
case "assistant":
// Previous assistant responses for context
- parts.push(`\n${msg.content}\n\n`);
+ parts.push(`\n${extractContent(msg.content)}\n\n`);
break;
}
}