diff --git a/README.md b/README.md
index d10983c..9990eb1 100644
--- a/README.md
+++ b/README.md
@@ -9,12 +9,13 @@ The conversations will also be printed out in the terminal, so you can use it in
 ## Features
 
 * Support different LLM provider
-  * Github Copilot
-  * OpenAI
-  * AnythingLLM
-  * Ollama
-  * Mistral
-  * Anthropic
+  * [GitHub Copilot](https://github.com/features/copilot)
+  * [OpenAI](https://openai.com)
+  * [LocalAI](https://localai.io/)
+  * [AnythingLLM](https://anythingllm.com/)
+  * [Ollama](https://ollama.com/)
+  * [Mistral](https://mistral.ai/)
+  * [Anthropic](https://www.anthropic.com/)
 * Scriptable
   * All settings can be set via command line arguments
   * The users questions and models answers will be printed out in the terminal
@@ -23,9 +24,9 @@ The conversations will also be printed out in the terminal, so you can use it in
 1. Install dependencies [see wails documentation](https://wails.io/docs/gettingstarted/installation)
 2. Build the application:
-    ```sh
-    wails build
-    ```
+```sh
+wails build
+```
 
 ## Contributing
diff --git a/config/config.go b/config/config.go
index 0174e51..1e898f0 100644
--- a/config/config.go
+++ b/config/config.go
@@ -13,6 +13,7 @@ import (
 
 const (
     BackendCopilot     = "copilot"
+    BackendLocalAI     = "localai"
     BackendOpenAI      = "openai"
     BackendAnythingLLM = "anythingllm"
     BackendOllama      = "ollama"
@@ -37,6 +38,7 @@ type Config struct {
     UI UIConfig
 
     Backend string
+    LocalAI     LocalAIConfig
     OpenAI      OpenAIConfig
     AnythingLLM AnythingLLMConfig
     Ollama      OllamaConfig
@@ -124,6 +126,7 @@ func Parse(arguments []string) *Config {
-    flag.StringVar(&c.Backend, "backend", BackendCopilot, fmt.Sprintf("The backend to use ('%s', '%s', '%s', '%s', '%s', '%s')", BackendCopilot, BackendOpenAI, BackendAnythingLLM, BackendOllama, BackendMistral, BackendAnthropic))
+    flag.StringVar(&c.Backend, "backend", BackendCopilot, fmt.Sprintf("The backend to use ('%s', '%s', '%s', '%s', '%s', '%s', '%s')", BackendCopilot, BackendLocalAI, BackendOpenAI, BackendAnythingLLM, BackendOllama, BackendMistral, BackendAnthropic))
 
+    configureLocalai(&c.LocalAI)
     configureOpenai(&c.OpenAI)
     configureAnythingLLM(&c.AnythingLLM)
     configureOllama(&c.Ollama)
@@ -219,6 +222,10 @@ func (c Config) Validate() error {
         if !llms.IsCopilotInstalled() {
             return fmt.Errorf("GitHub Copilot is not installed")
         }
+    case BackendLocalAI:
+        if ve := c.LocalAI.Validate(); ve != nil {
+            return ve
+        }
     case BackendOpenAI:
         if ve := c.OpenAI.Validate(); ve != nil {
             return ve
diff --git a/config/localai.go b/config/localai.go
new file mode 100644
index 0000000..d49c132
--- /dev/null
+++ b/config/localai.go
@@ -0,0 +1,49 @@
+package config
+
+import (
+    "flag"
+    "fmt"
+    "github.com/tmc/langchaingo/llms/openai"
+)
+
+type LocalAIConfig struct {
+    APIKey  string
+    Model   string
+    BaseUrl string
+}
+
+func configureLocalai(c *LocalAIConfig) {
+    flag.StringVar(&c.APIKey, "localai-api-key", "", "LocalAI API Key")
+    flag.StringVar(&c.Model, "localai-model", "", "LocalAI chat model")
+    flag.StringVar(&c.BaseUrl, "localai-base-url", "", "LocalAI API Base-URL")
+}
+
+func (c *LocalAIConfig) AsOptions() (opts []openai.Option) {
+    if c.APIKey != "" {
+        opts = append(opts, openai.WithToken(c.APIKey))
+    } else {
+        // the underlying OpenAI implementation requires an API key,
+        // so we just use a placeholder here; LocalAI doesn't need
+        // an API key and ignores it
+        opts = append(opts, openai.WithToken("PLACEHOLDER"))
+    }
+    if c.Model != "" {
+        opts = append(opts, openai.WithModel(c.Model))
+    }
+    if c.BaseUrl != "" {
+        opts = append(opts, openai.WithBaseURL(c.BaseUrl))
+    }
+
+    return
+}
+
+func (c *LocalAIConfig) Validate() error {
+    if c.BaseUrl == "" {
+        return fmt.Errorf("LocalAI Base URL is missing")
+    }
+    if c.Model == "" {
+        return fmt.Errorf("LocalAI Model is missing")
+    }
+
+    return nil
+}
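The `Validate` rules and the placeholder-token fallback in `AsOptions` are the behavioral core of the new backend, so a small test sketch may help pin them down. The following is a hypothetical `config/localai_test.go` written against the `config` package as added above; it is a sketch rather than part of the patch, and the base URL and model values are illustrative only:

```go
package config

import "testing"

// Hypothetical sketch (not part of the patch): pins down the Validate
// contract added above. A LocalAI backend needs both a base URL and
// a model, while the API key is optional.
func TestLocalAIConfigValidate(t *testing.T) {
	cases := []struct {
		name    string
		cfg     LocalAIConfig
		wantErr bool
	}{
		{"missing base URL", LocalAIConfig{Model: "some-model"}, true},
		{"missing model", LocalAIConfig{BaseUrl: "http://localhost:8080"}, true},
		{"valid without API key", LocalAIConfig{BaseUrl: "http://localhost:8080", Model: "some-model"}, false},
	}
	for _, tc := range cases {
		if err := tc.cfg.Validate(); (err != nil) != tc.wantErr {
			t.Errorf("%s: Validate() = %v, wantErr %v", tc.name, err, tc.wantErr)
		}
	}
}

// With no API key configured, AsOptions should still emit exactly one
// option: the placeholder token the underlying OpenAI client requires.
func TestLocalAIConfigAsOptionsPlaceholder(t *testing.T) {
	c := &LocalAIConfig{}
	if got := len(c.AsOptions()); got != 1 {
		t.Errorf("AsOptions() returned %d options, want 1", got)
	}
}
```

The second test leans on the fallback branch: an empty config still yields the one `openai.WithToken` option the underlying client insists on.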
diff --git a/controller/build.go b/controller/build.go
index 86599a1..da03673 100644
--- a/controller/build.go
+++ b/controller/build.go
@@ -35,6 +35,10 @@ func BuildFromConfig(cfg *config.Config) (ctrl *Controller, err error) {
     switch cfg.Backend {
     case config.BackendCopilot:
         ctrl.aiModel, err = llms.NewCopilot()
+    case config.BackendLocalAI:
+        ctrl.aiModel, err = llms.NewLocalAI(
+            cfg.LocalAI.AsOptions(),
+        )
     case config.BackendOpenAI:
         ctrl.aiModel, err = llms.NewOpenAI(
             cfg.OpenAI.AsOptions(),
diff --git a/llms/localai.go b/llms/localai.go
new file mode 100644
index 0000000..06b975f
--- /dev/null
+++ b/llms/localai.go
@@ -0,0 +1,18 @@
+package llms
+
+import (
+    "fmt"
+    "github.com/tmc/langchaingo/llms/openai"
+)
+
+func NewLocalAI(opts []openai.Option) (Model, error) {
+    // LocalAI aims to provide the same REST interface as OpenAI,
+    // which is why we reuse the OpenAI implementation here
+
+    result, err := NewOpenAI(opts)
+    if err != nil {
+        return nil, fmt.Errorf("error creating LocalAI LLM: %w", err)
+    }
+
+    return result, nil
+}
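For a manual end-to-end check against a locally running LocalAI instance, the backend should be selectable with the flags this patch defines, along the lines of `./<app-binary> -backend localai -localai-base-url http://localhost:8080 -localai-model <model-name>`. The binary, URL, and model names here are placeholders; `-localai-api-key` can be omitted because `AsOptions` falls back to the placeholder token.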