
Commit: add support for LocalAI

rainu committed Dec 16, 2024
1 parent 55c4036 commit 74cd66d
Showing 5 changed files with 88 additions and 9 deletions.
19 changes: 10 additions & 9 deletions README.md
@@ -9,12 +9,13 @@ The conversations will also be printed out in the terminal, so you can use it in
 ## Features
 
 * Support different LLM provider
-  * Github Copilot
-  * OpenAI
-  * AnythingLLM
-  * Ollama
-  * Mistral
-  * Anthropic
+  * [Github Copilot](https://github.com/features/copilot)
+  * [OpenAI](https://openai.com)
+  * [LocalAI](https://localai.io/)
+  * [AnythingLLM](https://anythingllm.com/)
+  * [Ollama](https://ollama.com/)
+  * [Mistral](https://mistral.ai/)
+  * [Anthropic](https://www.anthropic.com/)
 * Scriptable
   * All settings can be set via command line arguments
   * The users questions and models answers will be printed out in the terminal
@@ -23,9 +24,9 @@ The conversations will also be printed out in the terminal, so you can use it in

 1. Install dependencies [see wails documentation](https://wails.io/docs/gettingstarted/installation)
 2. Build the application:
-```sh
-wails build
-```
+   ```sh
+   wails build
+   ```
 
 ## Contributing
 
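With the flags introduced in this commit (defined in config/localai.go below), the new backend is fully scriptable from the command line. A hypothetical invocation against a locally running LocalAI server (the binary name, URL, and model name are placeholders, not part of the commit):

```sh
# -localai-api-key may be omitted; AsOptions falls back to a placeholder token
./app -backend localai \
  -localai-base-url http://localhost:8080/v1 \
  -localai-model llama-3.2-1b
```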
7 changes: 7 additions & 0 deletions config/config.go
@@ -13,6 +13,7 @@ import (

 const (
 	BackendCopilot     = "copilot"
+	BackendLocalAI     = "localai"
 	BackendOpenAI      = "openai"
 	BackendAnythingLLM = "anythingllm"
 	BackendOllama      = "ollama"
@@ -37,6 +38,7 @@ type Config struct {
 	UI UIConfig
 
 	Backend     string
+	LocalAI     LocalAIConfig
 	OpenAI      OpenAIConfig
 	AnythingLLM AnythingLLMConfig
 	Ollama      OllamaConfig
@@ -124,6 +126,7 @@ func Parse(arguments []string) *Config {

 	flag.StringVar(&c.Backend, "backend", BackendCopilot, fmt.Sprintf("The backend to use ('%s', '%s', '%s', '%s', '%s', '%s')", BackendCopilot, BackendOpenAI, BackendAnythingLLM, BackendOllama, BackendMistral, BackendAnthropic))
 
+	configureLocalai(&c.LocalAI)
 	configureOpenai(&c.OpenAI)
 	configureAnythingLLM(&c.AnythingLLM)
 	configureOllama(&c.Ollama)
@@ -219,6 +222,10 @@ func (c Config) Validate() error {
 		if !llms.IsCopilotInstalled() {
 			return fmt.Errorf("GitHub Copilot is not installed")
 		}
+	case BackendLocalAI:
+		if ve := c.LocalAI.Validate(); ve != nil {
+			return ve
+		}
 	case BackendOpenAI:
 		if ve := c.OpenAI.Validate(); ve != nil {
 			return ve
49 changes: 49 additions & 0 deletions config/localai.go
@@ -0,0 +1,49 @@
package config

import (
	"flag"
	"fmt"
	"github.com/tmc/langchaingo/llms/openai"
)

type LocalAIConfig struct {
	APIKey  string
	Model   string
	BaseUrl string
}

func configureLocalai(c *LocalAIConfig) {
	flag.StringVar(&c.APIKey, "localai-api-key", "", "LocalAI API Key")
	flag.StringVar(&c.Model, "localai-model", "", "LocalAI chat model")
	flag.StringVar(&c.BaseUrl, "localai-base-url", "", "LocalAI API Base-URL")
}

func (c *LocalAIConfig) AsOptions() (opts []openai.Option) {
	if c.APIKey != "" {
		opts = append(opts, openai.WithToken(c.APIKey))
	} else {
		// the underlying openai implementation wants to have an API key,
		// so we'll just use a placeholder here.
		// LocalAI doesn't need an API key and doesn't care about it.
		opts = append(opts, openai.WithToken("PLACEHOLDER"))
	}
	if c.Model != "" {
		opts = append(opts, openai.WithModel(c.Model))
	}
	if c.BaseUrl != "" {
		opts = append(opts, openai.WithBaseURL(c.BaseUrl))
	}

	return
}

func (c *LocalAIConfig) Validate() error {
	if c.BaseUrl == "" {
		return fmt.Errorf("LocalAI Base URL is missing")
	}
	if c.Model == "" {
		return fmt.Errorf("LocalAI Model is missing")
	}

	return nil
}
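Both new settings are mandatory once the backend is selected. A hypothetical run that omits the base URL (binary and model names are again placeholders) would be rejected with the error defined above:

```sh
./app -backend localai -localai-model llama-3.2-1b
# => LocalAI Base URL is missing
```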
4 changes: 4 additions & 0 deletions controller/build.go
@@ -35,6 +35,10 @@ func BuildFromConfig(cfg *config.Config) (ctrl *Controller, err error) {
 	switch cfg.Backend {
 	case config.BackendCopilot:
 		ctrl.aiModel, err = llms.NewCopilot()
+	case config.BackendLocalAI:
+		ctrl.aiModel, err = llms.NewLocalAI(
+			cfg.LocalAI.AsOptions(),
+		)
 	case config.BackendOpenAI:
 		ctrl.aiModel, err = llms.NewOpenAI(
 			cfg.OpenAI.AsOptions(),
18 changes: 18 additions & 0 deletions llms/localai.go
@@ -0,0 +1,18 @@
package llms

import (
	"fmt"
	"github.com/tmc/langchaingo/llms/openai"
)

func NewLocalAI(opts []openai.Option) (Model, error) {
	// LocalAI aims to provide the same REST interface as OpenAI,
	// so we reuse the OpenAI implementation here.

	result, err := NewOpenAI(opts)
	if err != nil {
		return nil, fmt.Errorf("error creating LocalAI LLM: %w", err)
	}

	return result, nil
}
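Since NewLocalAI is a thin wrapper over the OpenAI client, its wiring can be mirrored with langchaingo directly. A minimal standalone sketch (not part of the commit; the base URL and model name are assumptions, and the token value is ignored by LocalAI):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	// An OpenAI-compatible client pointed at a LocalAI server,
	// equivalent to what AsOptions and NewLocalAI assemble above.
	model, err := openai.New(
		openai.WithToken("PLACEHOLDER"),                // LocalAI ignores the key
		openai.WithModel("llama-3.2-1b"),               // a model installed in LocalAI
		openai.WithBaseURL("http://localhost:8080/v1"), // LocalAI's OpenAI-compatible endpoint
	)
	if err != nil {
		log.Fatal(err)
	}

	answer, err := llms.GenerateFromSinglePrompt(context.Background(), model, "Say hello!")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(answer)
}
```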
