fixed OpenRouter chat provider support
flukexp committed Dec 12, 2024
1 parent acc05d9 commit 4c7909a
Showing 5 changed files with 150 additions and 6 deletions.
18 changes: 17 additions & 1 deletion src/components/settings.tsx
@@ -65,6 +65,7 @@ import { NamePage } from './settings/NamePage';
import { SystemPromptPage } from './settings/SystemPromptPage';
import { AmicaLifePage } from "./settings/AmicaLifePage";
import { useVrmStoreContext } from "@/features/vrmStore/vrmStoreContext";
import { OpenRouterSettings } from "./settings/OpenRouterSettingsPage";

export const Settings = ({
onClickClose,
@@ -91,6 +92,9 @@ export const Settings = ({
const [koboldAiUrl, setKoboldAiUrl] = useState(config("koboldai_url"));
const [koboldAiUseExtra, setKoboldAiUseExtra] = useState<boolean>(config("koboldai_use_extra") === 'true' ? true : false);
const [koboldAiStopSequence, setKoboldAiStopSequence] = useState(config("koboldai_stop_sequence"));
const [openRouterApiKey, setOpenRouterApiKey] = useState(config("openrouter_apikey"));
const [openRouterUrl, setOpenRouterUrl] = useState(config("openrouter_url"));
const [openRouterModel, setOpenRouterModel] = useState(config("openrouter_model"));

const [ttsBackend, setTTSBackend] = useState(config("tts_backend"));
const [elevenlabsApiKey, setElevenlabsApiKey] = useState(config("elevenlabs_apikey"));
@@ -241,6 +245,7 @@ export const Settings = ({
llamaCppUrl, llamaCppStopSequence,
ollamaUrl, ollamaModel,
koboldAiUrl, koboldAiUseExtra, koboldAiStopSequence,
openRouterApiKey, openRouterUrl, openRouterModel,
ttsBackend,
elevenlabsApiKey, elevenlabsVoiceId,
speechT5SpeakerEmbeddingsUrl,
@@ -284,7 +289,7 @@ export const Settings = ({

case 'chatbot':
return <MenuPage
keys={["chatbot_backend", "name", "system_prompt", "chatgpt_settings", "llamacpp_settings", "ollama_settings", "koboldai_settings"]}
keys={["chatbot_backend", "name", "system_prompt", "chatgpt_settings", "llamacpp_settings", "ollama_settings", "koboldai_settings", "openrouter_settings"]}
menuClick={handleMenuClick} />;

case 'tts':
@@ -402,6 +407,17 @@ export const Settings = ({
setSettingsUpdated={setSettingsUpdated}
/>

case 'openrouter_settings':
return <OpenRouterSettings
openRouterUrl={openRouterUrl}
setOpenRouterUrl={setOpenRouterUrl}
openRouterApiKey={openRouterApiKey}
setOpenRouterApiKey={setOpenRouterApiKey}
openRouterModel={openRouterModel}
setOpenRouterModel={setOpenRouterModel}
setSettingsUpdated={setSettingsUpdated}
/>

case 'tts_backend':
return <TTSBackendPage
ttsBackend={ttsBackend}
76 changes: 76 additions & 0 deletions src/components/settings/OpenRouterSettingsPage.tsx
@@ -0,0 +1,76 @@
import { BasicPage, FormRow, NotUsingAlert } from './common';
import { TextInput } from '@/components/textInput';
import { SecretTextInput } from '@/components/secretTextInput';
import { config, updateConfig } from "@/utils/config";


export function OpenRouterSettings({
openRouterApiKey,
setOpenRouterApiKey,
openRouterUrl,
setOpenRouterUrl,
openRouterModel,
setOpenRouterModel,
setSettingsUpdated,
}: {
openRouterApiKey: string;
setOpenRouterApiKey: (key: string) => void;
openRouterUrl: string;
setOpenRouterUrl: (url: string) => void;
openRouterModel: string;
setOpenRouterModel: (model: string) => void;
setSettingsUpdated: (updated: boolean) => void;
}) {
const description = <>Configure OpenRouter settings. You can get an API key from <a href="https://openrouter.ai">https://openrouter.ai</a></>;

return (
<BasicPage
title="OpenRouter Settings"
description={description}
>
{ config("chatbot_backend") !== "openrouter" && (
<NotUsingAlert>
You are not currently using OpenRouter as your ChatBot backend. These settings will not be used.
</NotUsingAlert>
) }
<ul role="list" className="divide-y divide-gray-100 max-w-xs">
<li className="py-4">
<FormRow label="OpenRouter API Key">
<SecretTextInput
value={openRouterApiKey}
onChange={(event: React.ChangeEvent<any>) => {
setOpenRouterApiKey(event.target.value);
updateConfig("openrouter_apikey", event.target.value);
setSettingsUpdated(true);
}}
/>
</FormRow>
</li>
<li className="py-4">
<FormRow label="OpenAI URL">
<TextInput
value={openRouterUrl}
onChange={(event: React.ChangeEvent<any>) => {
setOpenRouterUrl(event.target.value);
updateConfig("openrouter_url", event.target.value);
setSettingsUpdated(true);
}}
/>
</FormRow>
</li>
<li className="py-4">
<FormRow label="OpenAI Model">
<TextInput
value={openRouterModel}
onChange={(event: React.ChangeEvent<any>) => {
setOpenRouterModel(event.target.value);
updateConfig("openrouter_model", event.target.value);
setSettingsUpdated(true);
}}
/>
</FormRow>
</li>
</ul>
</BasicPage>
);
}
4 changes: 3 additions & 1 deletion src/components/settings/common.tsx
@@ -145,7 +145,7 @@ export function getIconFromPage(page: string): JSX.Element {
case 'community': return <RocketLaunchIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;

case 'background_img': return <PhotoIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'background_color': return <SwatchIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'background_color': return <SwatchIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'background_video': return <FilmIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'character_model': return <UsersIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'character_animation': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
@@ -155,6 +155,7 @@ export function getIconFromPage(page: string): JSX.Element {
case 'llamacpp_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'ollama_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'koboldai_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'openrouter_settings': return <AdjustmentsHorizontalIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'name': return <IdentificationIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;
case 'system_prompt': return <DocumentTextIcon className="h-5 w-5 flex-none text-gray-800" aria-hidden="true" />;

@@ -204,6 +205,7 @@ function getLabelFromPage(page: string): string {
case 'llamacpp_settings': return t('LLama.cpp');
case 'ollama_settings': return t('Ollama');
case 'koboldai_settings': return t('KoboldAI');
case 'openrouter_settings': return t('OpenRouter');
case 'name' : return t('Name');
case 'system_prompt': return t('System Prompt');

3 changes: 3 additions & 0 deletions src/features/chat/chat.ts
@@ -25,6 +25,7 @@ import { cleanTalk } from "@/utils/cleanTalk";
import { processResponse } from "@/utils/processResponse";
import { wait } from "@/utils/wait";
import { isCharacterIdle, characterIdleTime, resetIdleTimer } from "@/utils/isIdle";
import { getOpenRouterChatResponseStream } from './openRouterChat';


type Speak = {
@@ -543,6 +544,8 @@ export class Chat {
return getOllamaChatResponseStream(messages);
case 'koboldai':
return getKoboldAiChatResponseStream(messages);
case 'openrouter':
return getOpenRouterChatResponseStream(messages);
}

return getEchoChatResponseStream(messages);
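
With this case added, routing a conversation to OpenRouter at runtime is controlled entirely by the chatbot_backend config value. A minimal TypeScript sketch of switching backends, using the same config helpers shown in the settings pages above (illustrative only, not part of the commit):

import { config, updateConfig } from "@/utils/config";

// Point the chat pipeline at the new OpenRouter backend.
updateConfig("chatbot_backend", "openrouter");
console.log(config("chatbot_backend")); // "openrouter"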
55 changes: 51 additions & 4 deletions src/features/chat/openRouterChat.ts
@@ -1,4 +1,4 @@
import { Message } from './chat';
import { Message } from './messages';
import { config } from '@/utils/config';

/**
@@ -13,7 +13,7 @@ export async function getOpenRouterChatResponseStream(messages: Message[]): Prom

const baseUrl = config('openrouter_url') ?? 'https://openrouter.ai/api/v1';
const model = config('openrouter_model') ?? 'openai/gpt-3.5-turbo';
const appUrl = config('app_url') ?? 'https://amica.chat';
const appUrl = 'https://amica.arbius.ai';

const response = await fetch(`${baseUrl}/chat/completions`, {
method: 'POST',
@@ -30,7 +30,8 @@ export async function getOpenRouterChatResponseStream(messages: Message[]): Prom
})
});

if (!response.ok) {
const reader = response.body?.getReader();
if (!response.ok || !reader) {
const error = await response.json();
// Handle OpenRouter-specific error format
if (error.error?.message) {
@@ -39,5 +40,51 @@ export async function getOpenRouterChatResponseStream(messages: Message[]): Prom
throw new Error(`OpenRouter request failed with status ${response.status}`);
}

return response.body!;
const stream = new ReadableStream({
async start(controller: ReadableStreamDefaultController) {
const decoder = new TextDecoder("utf-8");
try {
// sometimes the response is chunked, so we need to combine the chunks
let combined = "";
while (true) {
const { done, value } = await reader.read();
if (done) break;
const data = decoder.decode(value);
const chunks = data
.split("data:")
.filter((val) => !!val && val.trim() !== "[DONE]");

for (const chunk of chunks) {
// skip comments
if (chunk.length > 0 && chunk[0] === ":") {
continue;
}
combined += chunk;

try {
const json = JSON.parse(combined);
const messagePiece = json.choices[0].delta.content;
combined = "";
if (!!messagePiece) {
controller.enqueue(messagePiece);
}
} catch (error) {
console.error(error);
}
}
}
} catch (error) {
console.error(error);
controller.error(error);
} finally {
reader?.releaseLock();
controller.close();
}
},
async cancel() {
await reader?.cancel();
reader?.releaseLock();
}
});
return stream;
}
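
For reference, the stream above enqueues plain text pieces (one per delta.content), so any caller outside the Chat class can consume it with a standard ReadableStream reader. A minimal sketch, not part of the commit, assuming a Message shape of { role, content } (the real type lives in src/features/chat/messages):

import { getOpenRouterChatResponseStream } from "@/features/chat/openRouterChat";

async function collectOpenRouterReply(): Promise<string> {
  // Hypothetical message list; the actual Message type is defined in src/features/chat/messages.
  const messages = [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Hello!" },
  ];

  const stream = await getOpenRouterChatResponseStream(messages as any);
  const reader = stream.getReader();
  let reply = "";
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      reply += value; // each value is one text piece enqueued by the stream above
    }
  } finally {
    reader.releaseLock();
  }
  return reply;
}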
