
Commit f54e55a

Merge pull request #102 from RipeSeed/feature/model-persistent
feat: Selected model persistent in the dropdown
2 parents: cf58f9b + e4d220c · commit f54e55a

File tree

6 files changed (+87 -66 lines)


src/app/_components/ChatMessages.tsx (+4 -3)

@@ -40,7 +40,6 @@ export function ChatMessages() {
     updateStateMetadata,
     resetStateMetadata,
     addedAskRSmsg,
-    isOpenAI,
   } = useStore()

   const queryClient = useQueryClient()
@@ -109,10 +108,9 @@ export function ChatMessages() {

   useEffect(() => {
     if (messages.length) {
-      addedAskRSmsg()
       scrollToBottom()
     }
-  }, [addedAskRSmsg, messages])
+  }, [messages])

   useEffect(() => {
     if (clearChat) {
@@ -200,6 +198,9 @@ export function ChatMessages() {
     setMessages((prev) => [...prev, chatbotMessage])
     const _id = await appendMessageContent_aRS(-1, chatbotMessage.content)

+    const storedModel = localStorage.getItem('selected_model');
+    const isOpenAI = storedModel === 'openai';
+
     await sendMessageMutation({
       message: tmpMessage,
       uId,
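
Note: the hunk above inlines the localStorage read right before sendMessageMutation. A minimal sketch of the same lookup pulled into a helper — the function name is hypothetical and does not exist in the repo; only the 'selected_model' key and the 'openai' value come from this change:

// Hypothetical helper; mirrors the two inlined lines above.
export const isOpenAISelected = (): boolean => {
  // Guard for server-side rendering, where localStorage is unavailable.
  if (typeof window === 'undefined') return true // assumption: default to OpenAI
  return (localStorage.getItem('selected_model') ?? 'openai') === 'openai'
}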

src/app/_utils/store/store.ts (-4)

@@ -10,7 +10,6 @@ interface State {
   openAIKey: string
   clearChat: boolean
   askRSmsg: boolean
-  isOpenAI: boolean

   stateMetadata: {
     chatId: number
@@ -27,7 +26,6 @@ interface State {
   toggleDeleteDialogOpen: () => void
   setOpenAIKey: (key: string) => void
   setClearChat: (value: boolean) => void
-  setIsOpenAI: (value: boolean) => void
   updateStateMetadata: (metadata: Partial<State['stateMetadata']>) => void
   resetStateMetadata: () => void
 }
@@ -40,7 +38,6 @@ const useStore = create<State>((set) => ({
   openAIKey: '',
   clearChat: false,
   askRSmsg: false,
-  isOpenAI: true,

   stateMetadata: {
     chatId: 0,
@@ -59,7 +56,6 @@ const useStore = create<State>((set) => ({
     set((state) => ({ isDeleteDialogOpen: !state.isDeleteDialogOpen })),
   setOpenAIKey: (key) => set({ openAIKey: key }),
   setClearChat: (value) => set({ clearChat: value }),
-  setIsOpenAI: (value) => set({ isOpenAI: value }),

   updateStateMetadata: (metadata) =>
     set((state) => ({

src/app/api/chat/ask-ripeseed/route.ts (+1 -1)

@@ -15,7 +15,7 @@ export async function POST(request: Request) {
     [indexId],
     apiKey,
     true,
-    isOpenAI
+    isOpenAI,
   )

   return new Response(streamedResponse, {
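
For orientation, a hedged reconstruction of the surrounding handler — the hunk only shows the last four arguments of the call and the start of the Response; the request-body shape, the argument names, and the converse import are guesses, not code from this PR:

// Hypothetical sketch of the POST handler around the visible call tail.
import { converse } from '@/services/chat/conversation' // assumed import

export async function POST(request: Request) {
  // Assumed body shape; the client DAL (askRS_sendMessage) now sends isOpenAI too.
  const { messages, indexId, apiKey, isOpenAI } = await request.json()

  const streamedResponse = await converse(
    messages, // guessed leading argument
    [indexId],
    apiKey,
    true, // presumably an ask-ripeseed mode flag; kept as in the hunk
    isOpenAI,
  )

  // Headers are guessed; the hunk only shows the options object opening.
  return new Response(streamedResponse, {
    headers: { 'Content-Type': 'text/plain; charset=utf-8' },
  })
}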
ModelSelect component (+32 -15)

@@ -1,39 +1,56 @@
-import React from 'react'
+import React, { useEffect, useState } from 'react'

-import useStore from '@/app/_utils/store/store'
 import {
   Select,
   SelectContent,
   SelectItem,
   SelectTrigger,
   SelectValue,
-} from '@/components/ui/select'
+} from "@/components/ui/select";

 interface ModelSelectProps {
-  className?: string
+  className?: string;
 }

-const ModelSelect: React.FC<ModelSelectProps> = ({ className = '' }) => {
-  const { isOpenAI, setIsOpenAI } = useStore()
+const ModelSelect: React.FC<ModelSelectProps> = ({
+  className = '',
+}) => {

+  const [selectedModel, setSelectedModel] = useState(() => {
+    if (typeof window !== 'undefined') {
+      return localStorage.getItem('selected_model') || 'openai';
+    }
+    return 'openai';
+  });
+
+  // Update localStorage when model changes
   const handleValueChange = (value: string) => {
-    setIsOpenAI(value === 'openai')
-  }
+    setSelectedModel(value);
+    localStorage.setItem('selected_model', value);
+  };
+
+  // Sync with localStorage on mount
+  useEffect(() => {
+    const storedModel = localStorage.getItem('selected_model');
+    if (storedModel) {
+      setSelectedModel(storedModel);
+    }
+  }, []);

   return (
     <Select
-      defaultValue={isOpenAI ? 'openai' : 'deepseek'}
+      value={selectedModel}
       onValueChange={handleValueChange}
     >
       <SelectTrigger className={`w-32 ${className}`}>
-        <SelectValue placeholder='Select Model' />
+        <SelectValue placeholder="Select Model" />
       </SelectTrigger>
       <SelectContent>
-        <SelectItem value='deepseek'>DeepSeek</SelectItem>
-        <SelectItem value='openai'>OpenAI</SelectItem>
+        <SelectItem value="deepseek">DeepSeek</SelectItem>
+        <SelectItem value="openai">OpenAI</SelectItem>
       </SelectContent>
     </Select>
-  )
-}
+  );
+};

-export default ModelSelect
+export default ModelSelect;
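
A short usage sketch, not part of the diff: the parent component, class names, and import path below are hypothetical; the point is that ModelSelect now persists its own state, so no store wiring is needed at the call site.

// Hypothetical parent component; only ModelSelect itself comes from this PR.
import ModelSelect from './ModelSelect' // assumed relative path

export function ChatToolbar() {
  return (
    <div className='flex items-center gap-2'>
      {/* Reads and writes localStorage('selected_model') internally. */}
      <ModelSelect className='ml-auto' />
    </div>
  )
}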

src/dal/message/askRS_sendMessage.ts (+1 -1)

@@ -34,7 +34,7 @@ export const askRS_sendMessage = async ({
         content: message.content,
       },
     ],
-    isOpenAI
+    isOpenAI,
   })

   const requestOptions = {

src/services/chat/conversation.ts (+49 -42)

@@ -1,15 +1,16 @@
-import { OpenAIEmbeddings } from '@langchain/openai'
+import { OpenAIEmbeddings } from '@langchain/openai'
 import { Document } from 'langchain/document'
 import { formatDocumentsAsString } from 'langchain/util/document'

 import 'server-only'

-import { pineconeIndex } from './config'
 import { OpenAI } from 'openai'

+import { pineconeIndex } from './config'
+
 export interface Context {
-  role: 'system' | 'user' | 'assistant' | 'tool' | 'function';
-  content: string;
+  role: 'system' | 'user' | 'assistant' | 'tool' | 'function'
+  content: string
 }

 const instructions = `
@@ -26,37 +27,41 @@ const instructions = `
   Note: If user asks something NOT related to ripeseed, like any code snippet any other general question excuse them politely and ask them to ask the relevant questions regarding ripeseed.
 `

-const tools: OpenAI.Chat.ChatCompletionTool[] = [
+const tools: OpenAI.Chat.ChatCompletionTool[] = [
   {
-    "type": "function",
-    "function": {
-      "name": "book_meeting_call_appointment",
-      "description": "If someone wants to talk, books calls, meetings, appointments, or any meet-up with RipeSeed",
-      "parameters": {
-        "type": "object",
-        "properties": {},
-        "required": []
-      }
-    }
-  }
+    type: 'function',
+    function: {
+      name: 'book_meeting_call_appointment',
+      description:
+        'If someone wants to talk, books calls, meetings, appointments, or any meet-up with RipeSeed',
+      parameters: {
+        type: 'object',
+        properties: {},
+        required: [],
+      },
+    },
+  },
 ]

 interface QuestionGeneratorInput {
-  instructions: string;
-  context: string;
-  chatHistory?: string;
-  question: string;
+  instructions: string
+  context: string
+  chatHistory?: string
+  question: string
 }

-const getChain = async (questionGeneratorInput: QuestionGeneratorInput, isOpenAi: boolean) => {
-
-  const openai = isOpenAi ? new OpenAI() : new OpenAI({
-    baseURL: process.env.DEEPSEEK_BASE_URL,
-    apiKey: process.env.DEEPSEEK_API_KEY
-  });
-
-  const finalPrompt =
-    `Use the following pieces of context to answer the question at the end.
+const getChain = async (
+  questionGeneratorInput: QuestionGeneratorInput,
+  isOpenAi: boolean,
+) => {
+  const openai = isOpenAi
+    ? new OpenAI()
+    : new OpenAI({
+        baseURL: process.env.DEEPSEEK_BASE_URL,
+        apiKey: process.env.DEEPSEEK_API_KEY,
+      })
+
+  const finalPrompt = `Use the following pieces of context to answer the question at the end.
   ----------
   CONTEXT: ${questionGeneratorInput.context}
   ----------
@@ -68,20 +73,20 @@ const getChain = async (questionGeneratorInput: QuestionGeneratorInput, isOpenAi

   const messages = [
     {
-      role: "system" as const,
-      content: instructions
+      role: 'system' as const,
+      content: instructions,
     },
-    { role: "user" as const, content: finalPrompt }
-  ];
+    { role: 'user' as const, content: finalPrompt },
+  ]

   const stream: any = await openai.chat.completions.create({
     model: isOpenAi ? 'gpt-4o-mini' : 'deepseek-chat',
     messages: messages,
     stream: true,
     temperature: 0,
     tools: tools,
-    tool_choice: "auto"
-  });
+    tool_choice: 'auto',
+  })

   return stream
 }
@@ -146,19 +151,21 @@ export function converse(
       }

       const stream = await getChain(questionGeneratorInput, isOpenAi)
-      let completeMessage = '';
+      let completeMessage = ''
       for await (const chunk of stream) {
         if (chunk.choices[0]?.delta?.content) {
-          const content = chunk.choices[0].delta.content;
-          controller.enqueue(content);
-          completeMessage += content;
+          const content = chunk.choices[0].delta.content
+          controller.enqueue(content)
+          completeMessage += content
         }

         if (chunk.choices[0]?.delta?.tool_calls) {
-          const toolCalls = chunk.choices[0].delta.tool_calls;
+          const toolCalls = chunk.choices[0].delta.tool_calls
           if (Array.isArray(toolCalls) && toolCalls.length > 0) {
-            if (toolCalls[0].function?.name === "book_meeting_call_appointment") {
-              controller.enqueue("BOOK_MEETING");
+            if (
+              toolCalls[0].function?.name === 'book_meeting_call_appointment'
+            ) {
+              controller.enqueue('BOOK_MEETING')
             }
           }
         }
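
For reference, a standalone sketch of the provider switch getChain relies on: the openai SDK can target DeepSeek's OpenAI-compatible endpoint by overriding baseURL and apiKey. The environment variable and model names match the diff; the prompt and the demo wrapper are placeholders.

import { OpenAI } from 'openai'

// Build a client for whichever provider the persisted model choice selects,
// the same way getChain does above.
const makeClient = (isOpenAi: boolean) =>
  isOpenAi
    ? new OpenAI() // reads OPENAI_API_KEY from the environment
    : new OpenAI({
        baseURL: process.env.DEEPSEEK_BASE_URL, // DeepSeek's OpenAI-compatible endpoint
        apiKey: process.env.DEEPSEEK_API_KEY,
      })

async function demo(isOpenAi: boolean) {
  const client = makeClient(isOpenAi)
  const stream = await client.chat.completions.create({
    model: isOpenAi ? 'gpt-4o-mini' : 'deepseek-chat',
    messages: [{ role: 'user' as const, content: 'Say hello' }], // placeholder prompt
    stream: true,
  })
  // Streamed chunks arrive in the same shape for both providers.
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '')
  }
}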
