diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts
index 49116d0..14e1990 100644
--- a/app/api/chat/route.ts
+++ b/app/api/chat/route.ts
@@ -1,10 +1,11 @@
-import { createGoogleGenerativeAI } from '@ai-sdk/google';
-import { createOpenAI } from '@ai-sdk/openai';
-import { createOpenRouter } from '@openrouter/ai-sdk-provider';
-import { streamText, smoothStream } from 'ai';
-import { headers } from 'next/headers';
-import { getModelConfig, AIModel } from '@/lib/models';
-import { NextRequest, NextResponse } from 'next/server';
+import { createGoogleGenerativeAI } from "@ai-sdk/google";
+import { createOpenAI } from "@ai-sdk/openai";
+import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createLLMGateway } from "@llmgateway/ai-sdk-provider";
+import { streamText, smoothStream } from "ai";
+import { headers } from "next/headers";
+import { getModelConfig, AIModel } from "@/lib/models";
+import { NextRequest, NextResponse } from "next/server";
export const maxDuration = 60;
@@ -19,27 +20,32 @@ export async function POST(req: NextRequest) {
let aiModel;
switch (modelConfig.provider) {
- case 'google':
+ case "google":
const google = createGoogleGenerativeAI({ apiKey });
aiModel = google(modelConfig.modelId);
break;
- case 'openai':
+ case "openai":
const openai = createOpenAI({ apiKey });
aiModel = openai(modelConfig.modelId);
break;
- case 'openrouter':
+ case "openrouter":
const openrouter = createOpenRouter({ apiKey });
aiModel = openrouter(modelConfig.modelId);
break;
+ case "llmgateway":
+ const llmgateway = createLLMGateway({ apiKey });
+ aiModel = llmgateway(modelConfig.modelId);
+ break;
+
default:
return new Response(
- JSON.stringify({ error: 'Unsupported model provider' }),
+ JSON.stringify({ error: "Unsupported model provider" }),
{
status: 400,
- headers: { 'Content-Type': 'application/json' },
+ headers: { "Content-Type": "application/json" },
}
);
}
@@ -48,7 +54,7 @@ export async function POST(req: NextRequest) {
model: aiModel,
messages,
onError: (error) => {
- console.log('error', error);
+ console.log("error", error);
},
system: `
You are Chat0, an ai assistant that can answer questions and help with tasks.
@@ -65,10 +71,11 @@ export async function POST(req: NextRequest) {
- Display:
$$\\frac{d}{dx}\\sin(x) = \\cos(x)$$
`,
- experimental_transform: [smoothStream({ chunking: 'word' })],
+ experimental_transform: [smoothStream({ chunking: "word" })],
abortSignal: req.signal,
});
+
return result.toDataStreamResponse({
sendReasoning: true,
getErrorMessage: (error) => {
@@ -76,12 +83,12 @@ export async function POST(req: NextRequest) {
},
});
} catch (error) {
- console.log('error', error);
+ console.log("error", error);
return new NextResponse(
- JSON.stringify({ error: 'Internal Server Error' }),
+ JSON.stringify({ error: "Internal Server Error" }),
{
status: 500,
- headers: { 'Content-Type': 'application/json' },
+ headers: { "Content-Type": "application/json" },
}
);
}
diff --git a/frontend/components/APIKeyForm.tsx b/frontend/components/APIKeyForm.tsx
index 86336e0..348c6ad 100644
--- a/frontend/components/APIKeyForm.tsx
+++ b/frontend/components/APIKeyForm.tsx
@@ -22,6 +22,7 @@ const formSchema = z.object({
message: 'Google API key is required for Title Generation',
}),
openrouter: z.string().trim().optional(),
+ llmgateway: z.string().trim().optional(),
openai: z.string().trim().optional(),
});
@@ -84,6 +85,16 @@ const Form = () => {
required
/>
+
+
diff --git a/frontend/hooks/useMessageSummary.ts b/frontend/hooks/useMessageSummary.ts
--- a/frontend/hooks/useMessageSummary.ts
+++ b/frontend/hooks/useMessageSummary.ts
@@ -18,7 +17,6 @@ export const useMessageSummary = () => {
{
try {
const payload: MessageSummaryPayload = await response.json();
- if (response.ok) {
- const { title, isTitle, messageId, threadId } = payload;
+ const { title, isTitle, messageId, threadId } = payload;
if (isTitle) {
await updateThread(threadId, title);
@@ -31,9 +29,6 @@ export const useMessageSummary = () => {
} else {
await createMessageSummary(threadId, messageId, title);
}
- } else {
- toast.error('Failed to generate a summary for the message');
- }
} catch (error) {
console.error(error);
}
diff --git a/frontend/stores/APIKeyStore.ts b/frontend/stores/APIKeyStore.ts
index 2f92be9..a87ea64 100644
--- a/frontend/stores/APIKeyStore.ts
+++ b/frontend/stores/APIKeyStore.ts
@@ -1,7 +1,7 @@
import { create, Mutate, StoreApi } from 'zustand';
import { persist } from 'zustand/middleware';
-export const PROVIDERS = ['google', 'openrouter', 'openai'] as const;
+export const PROVIDERS = ['google', 'openrouter', 'llmgateway', 'openai'] as const;
export type Provider = (typeof PROVIDERS)[number];
type APIKeys = Record;
@@ -38,6 +38,7 @@ export const useAPIKeyStore = create()(
keys: {
google: '',
openrouter: '',
+ llmgateway: '',
openai: '',
},
diff --git a/lib/models.ts b/lib/models.ts
index a868083..1b75d79 100644
--- a/lib/models.ts
+++ b/lib/models.ts
@@ -7,6 +7,8 @@ export const AI_MODELS = [
'Gemini 2.5 Flash',
'GPT-4o',
'GPT-4.1-mini',
+ 'Claude 3.7 Sonnet',
+ 'Claude 3.5 Sonnet',
] as const;
export type AIModel = (typeof AI_MODELS)[number];
@@ -18,6 +20,16 @@ export type ModelConfig = {
};
export const MODEL_CONFIGS = {
+ 'Claude 3.7 Sonnet': {
+ modelId: 'claude-3-7-sonnet-20250219',
+ provider: 'llmgateway',
+ headerKey: 'Authorization',
+ },
+ 'Claude 3.5 Sonnet': {
+ modelId: 'claude-3-5-sonnet-20241022',
+ provider: 'llmgateway',
+ headerKey: 'Authorization',
+ },
'Deepseek R1 0528': {
modelId: 'deepseek/deepseek-r1-0528:free',
provider: 'openrouter',
diff --git a/package.json b/package.json
index 4ce4a43..01f990c 100644
--- a/package.json
+++ b/package.json
@@ -9,10 +9,12 @@
"lint": "next lint"
},
"dependencies": {
+ "@ai-sdk/anthropic": "^1.2.12",
"@ai-sdk/google": "^1.2.19",
"@ai-sdk/openai": "^1.3.22",
"@ai-sdk/react": "^1.2.12",
"@hookform/resolvers": "^5.0.1",
+ "@llmgateway/ai-sdk-provider": "^1.0.1",
"@openrouter/ai-sdk-provider": "^0.4.6",
"@radix-ui/react-dialog": "^1.1.14",
"@radix-ui/react-dropdown-menu": "^2.1.15",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index f9a440c..6976717 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -8,6 +8,9 @@ importers:
.:
dependencies:
+ '@ai-sdk/anthropic':
+ specifier: ^1.2.12
+ version: 1.2.12(zod@3.25.56)
'@ai-sdk/google':
specifier: ^1.2.19
version: 1.2.19(zod@3.25.56)
@@ -20,6 +23,9 @@ importers:
'@hookform/resolvers':
specifier: ^5.0.1
version: 5.0.1(react-hook-form@7.57.0(react@19.1.0))
+ '@llmgateway/ai-sdk-provider':
+ specifier: ^1.0.1
+ version: 1.0.1(ai@4.3.16(react@19.1.0)(zod@3.25.56))(zod@3.25.56)
'@openrouter/ai-sdk-provider':
specifier: ^0.4.6
version: 0.4.6(zod@3.25.56)
@@ -168,6 +174,12 @@ importers:
packages:
+ '@ai-sdk/anthropic@1.2.12':
+ resolution: {integrity: sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ zod: ^3.0.0
+
'@ai-sdk/google@1.2.19':
resolution: {integrity: sha512-Xgl6eftIRQ4srUdCzxM112JuewVMij5q4JLcNmHcB68Bxn9dpr3MVUSPlJwmameuiQuISIA8lMB+iRiRbFsaqA==}
engines: {node: '>=18'}
@@ -472,6 +484,13 @@ packages:
'@jridgewell/trace-mapping@0.3.25':
resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==}
+ '@llmgateway/ai-sdk-provider@1.0.1':
+ resolution: {integrity: sha512-PmIpoFK96oBVisN7n49r95idxDnr5WaX4PVsprj5ZxLvcLvP7Ies0nnCvHR23nJTG97pDVhX+R5NLs9jApBhdw==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ ai: ^4.3.16
+ zod: ^3.25.34
+
'@napi-rs/wasm-runtime@0.2.10':
resolution: {integrity: sha512-bCsCyeZEwVErsGmyPNSzwfwFn4OdxBj0mmv6hOFucB/k81Ojdu68RbZdxYsRQUPc9l6SU5F/cG+bXgWs3oUgsQ==}
@@ -3117,6 +3136,12 @@ packages:
snapshots:
+ '@ai-sdk/anthropic@1.2.12(zod@3.25.56)':
+ dependencies:
+ '@ai-sdk/provider': 1.1.3
+ '@ai-sdk/provider-utils': 2.2.8(zod@3.25.56)
+ zod: 3.25.56
+
'@ai-sdk/google@1.2.19(zod@3.25.56)':
dependencies:
'@ai-sdk/provider': 1.1.3
@@ -3399,6 +3424,13 @@ snapshots:
'@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.0
+ '@llmgateway/ai-sdk-provider@1.0.1(ai@4.3.16(react@19.1.0)(zod@3.25.56))(zod@3.25.56)':
+ dependencies:
+ '@ai-sdk/provider': 1.1.3
+ '@ai-sdk/provider-utils': 2.2.8(zod@3.25.56)
+ ai: 4.3.16(react@19.1.0)(zod@3.25.56)
+ zod: 3.25.56
+
'@napi-rs/wasm-runtime@0.2.10':
dependencies:
'@emnapi/core': 1.4.3