feat: hide entry for local ai
kangfenmao committed Aug 28, 2024
1 parent d42ee59 commit 89bdab5
Showing 9 changed files with 118 additions and 55 deletions.
4 changes: 2 additions & 2 deletions src/renderer/src/components/app/Sidebar.tsx
@@ -1,6 +1,6 @@
import { TranslationOutlined } from '@ant-design/icons'
import { isMac } from '@renderer/config/constant'
import { AppLogo } from '@renderer/config/env'
import { AppLogo, isLocalAi } from '@renderer/config/env'
import useAvatar from '@renderer/hooks/useAvatar'
import { useRuntime } from '@renderer/hooks/useStore'
import { Avatar } from 'antd'
@@ -51,7 +51,7 @@ const Sidebar: FC = () => {
</Menus>
</MainMenus>
<Menus>
<StyledLink to="/settings/provider">
<StyledLink to={isLocalAi ? '/settings/assistant' : '/settings/provider'}>
<Icon className={pathname.startsWith('/settings') ? 'active' : ''}>
<i className="iconfont icon-setting"></i>
</Icon>
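
The isLocalAi flag is imported from @renderer/config/env here and in the files below, but its definition is not part of this diff. A minimal sketch of a plausible definition, assuming the flag is simply derived from the same VITE_RENDERER_INTEGRATED_MODEL variable the rest of the commit reads (the real env.ts may differ):

// src/renderer/src/config/env.ts — sketch only, not shown in this commit.
// Assumption: a non-empty integrated-model env var is what marks a "local AI" build.
export const isLocalAi: boolean = Boolean(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
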
10 changes: 1 addition & 9 deletions src/renderer/src/config/models.ts
@@ -3,15 +3,7 @@ import { Model } from '@renderer/types'
type SystemModel = Model & { enabled: boolean }

export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
ollama: [
{
id: 'qwen2:0.5b',
provider: 'ollama',
name: ' Qwen2 0.5b',
group: 'Qwen2',
enabled: true
}
],
ollama: [],
openai: [
{
id: 'gpt-4o',
8 changes: 8 additions & 0 deletions src/renderer/src/env.d.ts
@@ -4,6 +4,14 @@ import type KeyvStorage from '@kangfenmao/keyv-storage'
import { MessageInstance } from 'antd/es/message/interface'
import { HookAPI } from 'antd/es/modal/useModal'

interface ImportMetaEnv {
VITE_RENDERER_INTEGRATED_MODEL: string
}

interface ImportMeta {
readonly env: ImportMetaEnv
}

declare global {
interface Window {
message: MessageInstance
13 changes: 13 additions & 0 deletions src/renderer/src/hooks/useAppInit.ts
@@ -1,16 +1,19 @@
import { isLocalAi } from '@renderer/config/env'
import i18n from '@renderer/i18n'
import LocalStorage from '@renderer/services/storage'
import { useAppDispatch } from '@renderer/store'
import { setAvatar } from '@renderer/store/runtime'
import { runAsyncFunction } from '@renderer/utils'
import { useEffect } from 'react'

import { useDefaultModel } from './useAssistant'
import { useSettings } from './useSettings'

export function useAppInit() {
const dispatch = useAppDispatch()
const { proxyUrl } = useSettings()
const { language } = useSettings()
const { setDefaultModel, setTopicNamingModel, setTranslateModel } = useDefaultModel()

useEffect(() => {
runAsyncFunction(async () => {
@@ -33,4 +36,14 @@ export function useAppInit() {
useEffect(() => {
i18n.changeLanguage(language || navigator.language || 'en-US')
}, [language])

useEffect(() => {
if (isLocalAi) {
const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
setDefaultModel(model)
setTopicNamingModel(model)
setTranslateModel(model)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
}
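
VITE_RENDERER_INTEGRATED_MODEL is expected to contain a JSON-encoded Model that the integrated build injects at build time; its actual value is not shown in this commit. A hypothetical example, reusing the field names of the Qwen2 entry removed from models.ts above (illustration only):

// Hypothetical .env entry for the integrated build:
// VITE_RENDERER_INTEGRATED_MODEL='{"id":"qwen2:0.5b","provider":"ollama","name":"Qwen2 0.5b","group":"Qwen2"}'
const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
// => { id: 'qwen2:0.5b', provider: 'ollama', name: 'Qwen2 0.5b', group: 'Qwen2' }

JSON.parse would throw if the variable were undefined, which is why both call sites (here and in store/llm.ts below) only read it behind the isLocalAi check.
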
5 changes: 5 additions & 0 deletions src/renderer/src/pages/home/components/SelectModelButton.tsx
@@ -1,4 +1,5 @@
import ModelAvatar from '@renderer/components/Avatar/ModelAvatar'
import { isLocalAi } from '@renderer/config/env'
import { useAssistant } from '@renderer/hooks/useAssistant'
import { Assistant } from '@renderer/types'
import { Button } from 'antd'
@@ -17,6 +18,10 @@ const SelectModelButton: FC<Props> = ({ assistant }) => {
const { model, setModel } = useAssistant(assistant.id)
const { t } = useTranslation()

if (isLocalAi) {
return null
}

return (
<SelectModelDropdown model={model} onSelect={setModel}>
<DropdownButton size="small" type="default">
29 changes: 17 additions & 12 deletions src/renderer/src/pages/settings/SettingsPage.tsx
@@ -6,6 +6,7 @@ import {
SettingOutlined
} from '@ant-design/icons'
import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
import { isLocalAi } from '@renderer/config/env'
import { FC } from 'react'
import { useTranslation } from 'react-i18next'
import { Link, Route, Routes, useLocation } from 'react-router-dom'
@@ -30,18 +31,22 @@ const SettingsPage: FC = () => {
</Navbar>
<ContentContainer>
<SettingMenus>
<MenuItemLink to="/settings/provider">
<MenuItem className={isRoute('/settings/provider')}>
<CloudOutlined />
{t('settings.provider')}
</MenuItem>
</MenuItemLink>
<MenuItemLink to="/settings/model">
<MenuItem className={isRoute('/settings/model')}>
<CodeSandboxOutlined />
{t('settings.model')}
</MenuItem>
</MenuItemLink>
{!isLocalAi && (
<>
<MenuItemLink to="/settings/provider">
<MenuItem className={isRoute('/settings/provider')}>
<CloudOutlined />
{t('settings.provider')}
</MenuItem>
</MenuItemLink>
<MenuItemLink to="/settings/model">
<MenuItem className={isRoute('/settings/model')}>
<CodeSandboxOutlined />
{t('settings.model')}
</MenuItem>
</MenuItemLink>
</>
)}
<MenuItemLink to="/settings/assistant">
<MenuItem className={isRoute('/settings/assistant')}>
<MessageOutlined />
42 changes: 27 additions & 15 deletions src/renderer/src/pages/translate/TranslatePage.tsx
@@ -1,6 +1,7 @@
import { CheckOutlined, SendOutlined, SettingOutlined, SwapOutlined, WarningOutlined } from '@ant-design/icons'
import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
import CopyIcon from '@renderer/components/Icons/CopyIcon'
import { isLocalAi } from '@renderer/config/env'
import { useDefaultModel } from '@renderer/hooks/useAssistant'
import { fetchTranslate } from '@renderer/services/api'
import { getDefaultAssistant } from '@renderer/services/assistant'
@@ -133,6 +134,31 @@ const TranslatePage: FC = () => {
isEmpty(text) && setResult('')
}, [text])

const SettingButton = () => {
if (isLocalAi) {
return null
}

if (translateModel) {
return (
<Link to="/settings/model" style={{ color: 'var(--color-text-2)' }}>
<SettingOutlined />
</Link>
)
}

return (
<Link to="/settings/model" style={{ marginLeft: -10 }}>
<Button
type="link"
style={{ color: 'var(--color-error)', textDecoration: 'underline' }}
icon={<WarningOutlined />}>
{t('translate.error.not_configured')}
</Button>
</Link>
)
}

return (
<Container>
<Navbar>
@@ -165,21 +191,7 @@
</Space>
)}
/>
{translateModel && (
<Link to="/settings/model" style={{ color: 'var(--color-text-2)' }}>
<SettingOutlined />
</Link>
)}
{!translateModel && (
<Link to="/settings/model" style={{ marginLeft: -10 }}>
<Button
type="link"
style={{ color: 'var(--color-error)', textDecoration: 'underline' }}
icon={<WarningOutlined />}>
{t('translate.error.not_configured')}
</Button>
</Link>
)}
<SettingButton />
</MenuContainer>
<TranslateInputWrapper>
<InputContainer>
7 changes: 4 additions & 3 deletions src/renderer/src/services/ProviderSDK.ts
@@ -2,11 +2,12 @@ import Anthropic from '@anthropic-ai/sdk'
import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources'
import { GoogleGenerativeAI } from '@google/generative-ai'
import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
import { isLocalAi } from '@renderer/config/env'
import { getOllamaKeepAliveTime } from '@renderer/hooks/useOllama'
import { Assistant, Message, Provider, Suggestion } from '@renderer/types'
import { removeQuotes } from '@renderer/utils'
import axios from 'axios'
import { isEmpty, sum, takeRight } from 'lodash'
import { first, isEmpty, sum, takeRight } from 'lodash'
import OpenAI from 'openai'
import { ChatCompletionCreateParamsNonStreaming, ChatCompletionMessageParam } from 'openai/resources'

@@ -239,13 +240,13 @@ export default class ProviderSDK {
// @ts-ignore key is not typed
const response = await this.openaiSdk.chat.completions.create({
model: model.id,
messages: [systemMessage, ...userMessages] as ChatCompletionMessageParam[],
messages: [systemMessage, ...(isLocalAi ? [first(userMessages)] : userMessages)] as ChatCompletionMessageParam[],
stream: false,
max_tokens: 50,
keep_alive: this.keepAliveTime
})

return removeQuotes(response.choices[0].message?.content || '')
return removeQuotes(response.choices[0].message?.content?.substring(0, 50) || '')
}

public async suggestions(messages: Message[], assistant: Assistant): Promise<Suggestion[]> {
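
The ProviderSDK change means topic naming behaves differently for the integrated local model: only the first user message is sent, and the reply is clipped to 50 characters on top of max_tokens: 50. A standalone sketch of that trimming logic (the helper names are mine, not from the codebase; the real method inlines these expressions):

import { first } from 'lodash'
import type { ChatCompletionMessageParam } from 'openai/resources'

// Local AI builds generate the topic name from just the first user message.
const pickNamingMessages = (
  isLocalAi: boolean,
  userMessages: ChatCompletionMessageParam[]
): ChatCompletionMessageParam[] => (isLocalAi ? [first(userMessages)!] : userMessages)

// The generated name is clipped to 50 characters regardless of what the model returns.
const clipTopicName = (content?: string | null): string => (content ?? '').substring(0, 50)
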
55 changes: 41 additions & 14 deletions src/renderer/src/store/llm.ts
@@ -1,4 +1,5 @@
import { createSlice, PayloadAction } from '@reduxjs/toolkit'
import { isLocalAi } from '@renderer/config/env'
import { SYSTEM_MODELS } from '@renderer/config/models'
import { Model, Provider } from '@renderer/types'
import { uniqBy } from 'lodash'
@@ -18,27 +19,18 @@ export interface LlmState {
}

const initialState: LlmState = {
defaultModel: SYSTEM_MODELS.ollama[0],
topicNamingModel: SYSTEM_MODELS.ollama[0],
translateModel: SYSTEM_MODELS.ollama[0],
defaultModel: SYSTEM_MODELS.openai[0],
topicNamingModel: SYSTEM_MODELS.openai[0],
translateModel: SYSTEM_MODELS.openai[0],
providers: [
{
id: 'ollama',
name: 'Ollama',
apiKey: '',
apiHost: 'http://localhost:11434/v1/',
models: SYSTEM_MODELS.ollama.filter((m) => m.enabled),
isSystem: true,
enabled: true
},
{
id: 'openai',
name: 'OpenAI',
apiKey: '',
apiHost: 'https://api.openai.com',
models: SYSTEM_MODELS.openai.filter((m) => m.enabled),
isSystem: true,
enabled: false
enabled: true
},
{
id: 'gemini',
@@ -58,6 +50,15 @@ const initialState: LlmState = {
isSystem: true,
enabled: false
},
{
id: 'ollama',
name: 'Ollama',
apiKey: '',
apiHost: 'http://localhost:11434/v1/',
models: SYSTEM_MODELS.ollama.filter((m) => m.enabled),
isSystem: true,
enabled: false
},
{
id: 'silicon',
name: 'Silicon',
@@ -192,9 +193,35 @@ const initialState: LlmState = {
}
}

const getIntegratedInitialState = () => {
const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)

return {
defaultModel: model,
topicNamingModel: model,
translateModel: model,
providers: [
{
id: 'ollama',
name: 'Ollama',
apiKey: 'ollama',
apiHost: 'http://localhost:15537/v1/',
models: [model],
isSystem: true,
enabled: true
}
],
settings: {
ollama: {
keepAliveTime: 3600
}
}
} as LlmState
}

const settingsSlice = createSlice({
name: 'llm',
initialState,
initialState: isLocalAi ? getIntegratedInitialState() : initialState,
reducers: {
updateProvider: (state, action: PayloadAction<Provider>) => {
state.providers = state.providers.map((p) => (p.id === action.payload.id ? { ...p, ...action.payload } : p))
