Skip to content

Commit d17d38a

Browse files
authored
Added OpenRouter support for the chat backend
OpenRouter (PR-147): fixes and testing.
2 parents 3a4df8a + 4c7909a commit d17d38a

File tree

10 files changed

+233
-9
lines changed

10 files changed

+233
-9
lines changed

README.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@ The various features of Amica mainly use and support the following technologies:
5656
- [Ollama](https://ollama.ai)
5757
- [KoboldCpp](https://github.com/LostRuins/koboldcpp)
5858
- [Oobabooga](https://github.com/oobabooga/text-generation-webui/wiki)
59+
- [OpenRouter](https://openrouter.ai/) (access to multiple AI models)
5960
- Text-to-Speech
6061
- [Eleven Labs API](https://elevenlabs.io/)
6162
- [Speech T5](https://huggingface.co/microsoft/speecht5_tts)
@@ -94,6 +95,14 @@ Once started, please visit the following URL to confirm that it is working prope
9495

9596
Most of the configuration is done in the `.env.local` file. Reference the `config.ts` file for the available options.
9697

98+
#### OpenRouter Configuration
99+
100+
To use OpenRouter as a chat backend, set the following environment variables in your `.env.local` file:
101+
102+
- `NEXT_PUBLIC_OPENROUTER_APIKEY`: Your OpenRouter API key (required)
103+
- `NEXT_PUBLIC_OPENROUTER_URL`: Custom OpenRouter API URL (optional, defaults to https://openrouter.ai/api/v1)
104+
- `NEXT_PUBLIC_OPENROUTER_MODEL`: Default OpenRouter model (optional, defaults to openai/gpt-3.5-turbo)
105+
97106
```bash
98107
amica
99108
├── .env.local

src/components/settings.tsx

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,7 @@ import { NamePage } from './settings/NamePage';
6565
import { SystemPromptPage } from './settings/SystemPromptPage';
6666
import { AmicaLifePage } from "./settings/AmicaLifePage";
6767
import { useVrmStoreContext } from "@/features/vrmStore/vrmStoreContext";
68+
import { OpenRouterSettings } from "./settings/OpenRouterSettingsPage";
6869

6970
export const Settings = ({
7071
onClickClose,
@@ -91,6 +92,9 @@ export const Settings = ({
9192
const [koboldAiUrl, setKoboldAiUrl] = useState(config("koboldai_url"));
9293
const [koboldAiUseExtra, setKoboldAiUseExtra] = useState<boolean>(config("koboldai_use_extra") === 'true' ? true : false);
9394
const [koboldAiStopSequence, setKoboldAiStopSequence] = useState(config("koboldai_stop_sequence"));
95+
const [openRouterApiKey, setOpenRouterApiKey] = useState(config("openrouter_apikey"));
96+
const [openRouterUrl, setOpenRouterUrl] = useState(config("openrouter_url"));
97+
const [openRouterModel, setOpenRouterModel] = useState(config("openrouter_model"));
9498

9599
const [ttsBackend, setTTSBackend] = useState(config("tts_backend"));
96100
const [elevenlabsApiKey, setElevenlabsApiKey] = useState(config("elevenlabs_apikey"));
@@ -241,6 +245,7 @@ export const Settings = ({
241245
llamaCppUrl, llamaCppStopSequence,
242246
ollamaUrl, ollamaModel,
243247
koboldAiUrl, koboldAiUseExtra, koboldAiStopSequence,
248+
openRouterApiKey, openRouterUrl, openRouterModel,
244249
ttsBackend,
245250
elevenlabsApiKey, elevenlabsVoiceId,
246251
speechT5SpeakerEmbeddingsUrl,
@@ -284,7 +289,7 @@ export const Settings = ({
284289

285290
case 'chatbot':
286291
return <MenuPage
287-
keys={["chatbot_backend", "name", "system_prompt", "chatgpt_settings", "llamacpp_settings", "ollama_settings", "koboldai_settings"]}
292+
keys={["chatbot_backend", "name", "system_prompt", "chatgpt_settings", "llamacpp_settings", "ollama_settings", "koboldai_settings", "openrouter_settings"]}
288293
menuClick={handleMenuClick} />;
289294

290295
case 'tts':
@@ -402,6 +407,17 @@ export const Settings = ({
402407
setSettingsUpdated={setSettingsUpdated}
403408
/>
404409

410+
case 'openrouter_settings':
411+
return <OpenRouterSettings
412+
openRouterUrl={openRouterUrl}
413+
setOpenRouterUrl={setOpenRouterUrl}
414+
openRouterApiKey={openRouterApiKey}
415+
setOpenRouterApiKey={setOpenRouterApiKey}
416+
openRouterModel={openRouterModel}
417+
setOpenRouterModel={setOpenRouterModel}
418+
setSettingsUpdated={setSettingsUpdated}
419+
/>
420+
405421
case 'tts_backend':
406422
return <TTSBackendPage
407423
ttsBackend={ttsBackend}

src/components/settings/ChatbotBackendPage.tsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ const chatbotBackends = [
1212
...isTauri() ? [] : [{key: "windowai", label: "Window.ai"}], // Hides Window.ai when using the desktop app
1313
{key: "ollama", label: "Ollama"},
1414
{key: "koboldai", label: "KoboldAI"},
15+
{key: "openrouter", label: "OpenRouter"},
1516
];
1617

1718
function idToTitle(id: string): string {
@@ -72,7 +73,7 @@ export function ChatbotBackendPage({
7273
</select>
7374
</FormRow>
7475
</li>
75-
{ ["chatgpt", "llamacpp", "ollama", "koboldai"].includes(chatbotBackend) && (
76+
{ ["chatgpt", "llamacpp", "ollama", "koboldai", "openrouter"].includes(chatbotBackend) && (
7677
<li className="py-4">
7778
<FormRow label={`${t("Configure")} ${t(idToTitle(chatbotBackend))}`}>
7879
<button
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
import { BasicPage, FormRow, NotUsingAlert } from './common';
2+
import { TextInput } from '@/components/textInput';
3+
import { SecretTextInput } from '@/components/secretTextInput';
4+
import { config, updateConfig } from "@/utils/config";
5+
6+
7+
export function OpenRouterSettings({
8+
openRouterApiKey,
9+
setOpenRouterApiKey,
10+
openRouterUrl,
11+
setOpenRouterUrl,
12+
openRouterModel,
13+
setOpenRouterModel,
14+
setSettingsUpdated,
15+
}: {
16+
openRouterApiKey: string;
17+
setOpenRouterApiKey: (key: string) => void;
18+
openRouterUrl: string;
19+
setOpenRouterUrl: (url: string) => void;
20+
openRouterModel: string;
21+
setOpenRouterModel: (model: string) => void;
22+
setSettingsUpdated: (updated: boolean) => void;
23+
}) {
24+
const description = <>Configure OpenRouter settings. You can get an API key from <a href="https://openrouter.ai">https://openrouter.ai</a></>;
25+
26+
return (
27+
<BasicPage
28+
title="OpenRouter Settings"
29+
description={description}
30+
>
31+
{ config("chatbot_backend") !== "openrouter" && (
32+
<NotUsingAlert>
33+
You are not currently using OpenRouter as your ChatBot backend. These settings will not be used.
34+
</NotUsingAlert>
35+
) }
36+
<ul role="list" className="divide-y divide-gray-100 max-w-xs">
37+
<li className="py-4">
38+
<FormRow label="OpenRouter API Key">
39+
<SecretTextInput
40+
value={openRouterApiKey}
41+
onChange={(event: React.ChangeEvent<any>) => {
42+
setOpenRouterApiKey(event.target.value);
43+
updateConfig("openrouter_apikey", event.target.value);
44+
setSettingsUpdated(true);
45+
}}
46+
/>
47+
</FormRow>
48+
</li>
49+
<li className="py-4">
50+
<FormRow label="OpenAI URL">
51+
<TextInput
52+
value={openRouterUrl}
53+
onChange={(event: React.ChangeEvent<any>) => {
54+
setOpenRouterUrl(event.target.value);
55+
updateConfig("openrouter_url", event.target.value);
56+
setSettingsUpdated(true);
57+
}}
58+
/>
59+
</FormRow>
60+
</li>
61+
<li className="py-4">
62+
<FormRow label="OpenAI Model">
63+
<TextInput
64+
value={openRouterModel}
65+
onChange={(event: React.ChangeEvent<any>) => {
66+
setOpenRouterModel(event.target.value);
67+
updateConfig("openrouter_model", event.target.value);
68+
setSettingsUpdated(true);
69+
}}
70+
/>
71+
</FormRow>
72+
</li>
73+
</ul>
74+
</BasicPage>
75+
);
76+
}

src/components/settings/common.tsx

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -145,7 +145,7 @@ export function getIconFromPage(page: string): JSX.Element {
145145
case 'community': return <RocketLaunchIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
146146

147147
case 'background_img': return <PhotoIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
148-
case 'background_color': return <SwatchIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
148+
case 'background_color': return <SwatchIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
149149
case 'background_video': return <FilmIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
150150
case 'character_model': return <UsersIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
151151
case 'character_animation': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
@@ -155,6 +155,7 @@ export function getIconFromPage(page: string): JSX.Element {
155155
case 'llamacpp_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
156156
case 'ollama_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
157157
case 'koboldai_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
158+
case 'openrouter_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
158159
case 'name': return <IdentificationIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
159160
case 'system_prompt': return <DocumentTextIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
160161

@@ -204,6 +205,7 @@ function getLabelFromPage(page: string): string {
204205
case 'llamacpp_settings': return t('LLama.cpp');
205206
case 'ollama_settings': return t('Ollama');
206207
case 'koboldai_settings': return t('KoboldAI');
208+
case 'openrouter_settings': return t('OpenRouter');
207209
case 'name' : return t('Name');
208210
case 'system_prompt': return t('System Prompt');
209211

src/env.d.ts

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
declare namespace NodeJS {
2+
interface ProcessEnv {
3+
// OpenRouter Configuration
4+
NEXT_PUBLIC_OPENROUTER_APIKEY?: string;
5+
NEXT_PUBLIC_OPENROUTER_URL?: string;
6+
NEXT_PUBLIC_OPENROUTER_MODEL?: string;
7+
8+
// Existing environment variables (preserving for type safety)
9+
NEXT_PUBLIC_CHATBOT_BACKEND?: string;
10+
NEXT_PUBLIC_OPENAI_APIKEY?: string;
11+
NEXT_PUBLIC_OPENAI_URL?: string;
12+
NEXT_PUBLIC_OPENAI_MODEL?: string;
13+
NEXT_PUBLIC_LLAMACPP_URL?: string;
14+
NEXT_PUBLIC_LLAMACPP_STOP_SEQUENCE?: string;
15+
NEXT_PUBLIC_OLLAMA_URL?: string;
16+
NEXT_PUBLIC_OLLAMA_MODEL?: string;
17+
NEXT_PUBLIC_KOBOLDAI_URL?: string;
18+
NEXT_PUBLIC_KOBOLDAI_USE_EXTRA?: string;
19+
NEXT_PUBLIC_KOBOLDAI_STOP_SEQUENCE?: string;
20+
}
21+
}

src/features/chat/chat.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ import { cleanTalk } from "@/utils/cleanTalk";
2525
import { processResponse } from "@/utils/processResponse";
2626
import { wait } from "@/utils/wait";
2727
import { isCharacterIdle, characterIdleTime, resetIdleTimer } from "@/utils/isIdle";
28+
import { getOpenRouterChatResponseStream } from './openRouterChat';
2829

2930

3031
type Speak = {
@@ -543,6 +544,8 @@ export class Chat {
543544
return getOllamaChatResponseStream(messages);
544545
case 'koboldai':
545546
return getKoboldAiChatResponseStream(messages);
547+
case 'openrouter':
548+
return getOpenRouterChatResponseStream(messages);
546549
}
547550

548551
return getEchoChatResponseStream(messages);

src/features/chat/openRouterChat.ts

Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
import { Message } from './messages';
2+
import { config } from '@/utils/config';
3+
4+
/**
5+
* Gets a streaming chat response from OpenRouter API.
6+
* OpenRouter provides an OpenAI-compatible API with access to multiple models.
7+
*/
8+
export async function getOpenRouterChatResponseStream(messages: Message[]): Promise<ReadableStream> {
9+
const apiKey = config('openrouter_apikey');
10+
if (!apiKey) {
11+
throw new Error('OpenRouter API key is required');
12+
}
13+
14+
const baseUrl = config('openrouter_url') ?? 'https://openrouter.ai/api/v1';
15+
const model = config('openrouter_model') ?? 'openai/gpt-3.5-turbo';
16+
const appUrl = 'https://amica.arbius.ai';
17+
18+
const response = await fetch(`${baseUrl}/chat/completions`, {
19+
method: 'POST',
20+
headers: {
21+
'Authorization': `Bearer ${apiKey}`,
22+
'Content-Type': 'application/json',
23+
'HTTP-Referer': appUrl,
24+
'X-Title': 'Amica Chat'
25+
},
26+
body: JSON.stringify({
27+
model,
28+
messages: messages.map(({ role, content }) => ({ role, content })),
29+
stream: true
30+
})
31+
});
32+
33+
const reader = response.body?.getReader();
34+
if (!response.ok || !reader) {
35+
const error = await response.json();
36+
// Handle OpenRouter-specific error format
37+
if (error.error?.message) {
38+
throw new Error(`OpenRouter error: ${error.error.message}`);
39+
}
40+
throw new Error(`OpenRouter request failed with status ${response.status}`);
41+
}
42+
43+
const stream = new ReadableStream({
44+
async start(controller: ReadableStreamDefaultController) {
45+
const decoder = new TextDecoder("utf-8");
46+
try {
47+
// sometimes the response is chunked, so we need to combine the chunks
48+
let combined = "";
49+
while (true) {
50+
const { done, value } = await reader.read();
51+
if (done) break;
52+
const data = decoder.decode(value);
53+
const chunks = data
54+
.split("data:")
55+
.filter((val) => !!val && val.trim() !== "[DONE]");
56+
57+
for (const chunk of chunks) {
58+
// skip comments
59+
if (chunk.length > 0 && chunk[0] === ":") {
60+
continue;
61+
}
62+
combined += chunk;
63+
64+
try {
65+
const json = JSON.parse(combined);
66+
const messagePiece = json.choices[0].delta.content;
67+
combined = "";
68+
if (!!messagePiece) {
69+
controller.enqueue(messagePiece);
70+
}
71+
} catch (error) {
72+
console.error(error);
73+
}
74+
}
75+
}
76+
} catch (error) {
77+
console.error(error);
78+
controller.error(error);
79+
} finally {
80+
reader?.releaseLock();
81+
controller.close();
82+
}
83+
},
84+
async cancel() {
85+
await reader?.cancel();
86+
reader?.releaseLock();
87+
}
88+
});
89+
return stream;
90+
}

src/utils/askLlm.ts

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ import { getLlamaCppChatResponseStream } from "@/features/chat/llamaCppChat";
77
import { getWindowAiChatResponseStream } from "@/features/chat/windowAiChat";
88
import { getOllamaChatResponseStream } from "@/features/chat/ollamaChat";
99
import { getKoboldAiChatResponseStream } from "@/features/chat/koboldAiChat";
10+
import { getOpenRouterChatResponseStream } from "@/features/chat/openRouterChat";
1011

1112
import { config } from "@/utils/config";
1213
import { processResponse } from "@/utils/processResponse";
@@ -50,6 +51,8 @@ export async function askLLM(
5051
return getOllamaChatResponseStream(messages);
5152
case "koboldai":
5253
return getKoboldAiChatResponseStream(messages);
54+
case "openrouter":
55+
return getOpenRouterChatResponseStream(messages);
5356
default:
5457
return getEchoChatResponseStream(messages);
5558
}
@@ -111,7 +114,7 @@ export async function askLLM(
111114

112115
receivedMessage += value;
113116
receivedMessage = receivedMessage.trimStart();
114-
117+
115118

116119
if (chat !== null) {
117120
const proc = processResponse({
@@ -132,26 +135,26 @@ export async function askLLM(
132135
screenplay: aiTalks[0],
133136
streamIdx: currentStreamIdx,
134137
});
135-
138+
136139
if (! firstSentenceEncountered) {
137140
console.timeEnd('performance_time_to_first_sentence');
138141
firstSentenceEncountered = true;
139142
}
140-
143+
141144
return false; // normal processing
142145
}
143146
});
144-
147+
145148
sentences = proc.sentences;
146149
aiTextLog = proc.aiTextLog;
147150
receivedMessage = proc.receivedMessage;
148151
tag = proc.tag;
149152
rolePlay = proc.rolePlay;
150153
if (proc.shouldBreak) {
151154
break;
152-
}
155+
}
153156
}
154-
157+
155158
}
156159
} catch (e: any) {
157160
const errMsg = e.toString();

src/utils/config.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,9 @@ const defaults = {
3333
koboldai_url: process.env.NEXT_PUBLIC_KOBOLDAI_URL ?? 'http://localhost:5001',
3434
koboldai_use_extra: process.env.NEXT_PUBLIC_KOBOLDAI_USE_EXTRA ?? 'false',
3535
koboldai_stop_sequence: process.env.NEXT_PUBLIC_KOBOLDAI_STOP_SEQUENCE ?? '(End)||[END]||Note||***||You:||User:||</s>',
36+
openrouter_apikey: process.env.NEXT_PUBLIC_OPENROUTER_APIKEY ?? '',
37+
openrouter_url: process.env.NEXT_PUBLIC_OPENROUTER_URL ?? 'https://openrouter.ai/api/v1',
38+
openrouter_model: process.env.NEXT_PUBLIC_OPENROUTER_MODEL ?? 'openai/gpt-3.5-turbo',
3639
tts_muted: 'false',
3740
tts_backend: process.env.NEXT_PUBLIC_TTS_BACKEND ?? 'piper',
3841
stt_backend: process.env.NEXT_PUBLIC_STT_BACKEND ?? 'whisper_browser',

0 commit comments

Comments
 (0)