diff --git a/app/constant.ts b/app/constant.ts
index de75d618daf..c1e97bd8cb2 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -73,6 +73,7 @@ export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown";
 export enum ServiceProvider {
   OpenAI = "OpenAI",
   Azure = "Azure",
+  Variant = "Variant",
 }
 
 export const OpenaiPath = {
diff --git a/app/store/access.ts b/app/store/access.ts
index 17d0f24ca8b..e800bed7d0f 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -18,15 +18,15 @@ const DEFAULT_ACCESS_STATE = {
   accessCode: "",
   useCustomConfig: false,
 
-  provider: ServiceProvider.Azure,
+  provider: ServiceProvider.Variant,
 
   // openai
   openaiUrl: DEFAULT_OPENAI_URL,
   openaiApiKey: "",
 
   // azure
-  azureUrl: process.env.AZURE_OPENAI_API_BASE ?? "",
-  azureApiKey: process.env.AZURE_API_KEY ?? "",
+  azureUrl: "",
+  azureApiKey: "",
   azureApiVersion: "2023-08-01-preview",
 
   // server config
@@ -53,6 +53,10 @@ export const useAccessStore = createPersistStore(
     },
     isValidAzure() {
+      const test = ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"])
+        ? "Ja dette er veldig riktig"
+        : "nei dette ble feil";
+      console.log(get().azureUrl);
       return ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"]);
     },
diff --git a/app/store/config.ts b/app/store/config.ts
index 0b5ed9f893b..c0f09c081e6 100644
--- a/app/store/config.ts
+++ b/app/store/config.ts
@@ -48,7 +48,7 @@ export const DEFAULT_CONFIG = {
   models: DEFAULT_MODELS as any as LLMModel[],
 
   modelConfig: {
-    model: "gpt-3.5-turbo" as ModelType,
+    model: "variant-rocks" as ModelType,
     temperature: 0.5,
     top_p: 1,
     max_tokens: 4000,