Skip to content

Commit

Permalink
don't delete anythingllm chat-thread after shutting down - can be conf…
Browse files Browse the repository at this point in the history
…igured
  • Loading branch information
rainu committed Jan 25, 2025
1 parent 42c16b1 commit 67ed5e0
Show file tree
Hide file tree
Showing 11 changed files with 127 additions and 69 deletions.
3 changes: 3 additions & 0 deletions config/default.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,9 @@ func defaultConfig() *Config {
TopK: -1,
TopP: -1,
},
AnythingLLM: llm.AnythingLLMConfig{
DeleteThread: false,
},
OpenAI: llm.OpenAIConfig{
APIType: string(openai.APITypeOpenAI),
Model: "gpt-4o-mini",
Expand Down
5 changes: 5 additions & 0 deletions config/llm/anthropic.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
"github.com/tmc/langchaingo/llms/anthropic"
)

Expand Down Expand Up @@ -35,3 +36,7 @@ func (c *AnthropicConfig) Validate() error {

return nil
}

// BuildLLM constructs an Anthropic-backed model from this configuration.
func (c *AnthropicConfig) BuildLLM() (llms.Model, error) {
	opts := c.AsOptions()
	return llms.NewAnthropic(opts)
}
17 changes: 14 additions & 3 deletions config/llm/anythingllm.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,14 @@ package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
)

// AnythingLLMConfig holds the connection settings for an AnythingLLM server.
// (Merged post-commit definition; the diff hunk interleaved old and new field lines.)
type AnythingLLMConfig struct {
	BaseURL   string `yaml:"base-url" usage:"Base URL"`
	Token     string `yaml:"token" usage:"Token"`
	Workspace string `yaml:"workspace" usage:"Workspace"`

	// DeleteThread controls whether the chat thread is removed when the session closes.
	DeleteThread bool `yaml:"delete-thread" usage:"Delete the thread after the session is closed"`
}

func (c *AnythingLLMConfig) Validate() error {
Expand All @@ -23,3 +25,12 @@ func (c *AnythingLLMConfig) Validate() error {

return nil
}

// BuildLLM constructs an AnythingLLM-backed model from this configuration.
func (c *AnythingLLMConfig) BuildLLM() (llms.Model, error) {
	return llms.NewAnythingLLM(c.BaseURL, c.Token, c.Workspace, c.DeleteThread)
}
20 changes: 20 additions & 0 deletions config/llm/copilot.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
)

// CopilotConfig holds the configuration for the GitHub Copilot backend.
// It is currently empty: the backend only needs the locally installed Copilot CLI.
type CopilotConfig struct {
}

// Validate reports an error when the GitHub Copilot CLI is not available locally.
func (c *CopilotConfig) Validate() error {
	if llms.IsCopilotInstalled() {
		return nil
	}
	return fmt.Errorf("GitHub Copilot is not installed")
}

// BuildLLM constructs a GitHub-Copilot-backed model.
func (c *CopilotConfig) BuildLLM() (llms.Model, error) {
	model, err := llms.NewCopilot()
	return model, err
}
96 changes: 35 additions & 61 deletions config/llm/general.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,15 @@ const (
BackendAnthropic = "anthropic"
)

// llmConfig is the common contract every backend configuration implements:
// it can validate itself and build the corresponding LLM instance.
type llmConfig interface {
	BuildLLM() (llms.Model, error)
	Validate() error
}

type LLMConfig struct {
Backend string `yaml:"backend" short:"b"`

Copilot CopilotConfig `yaml:"copilot" usage:"Copilot: "`
LocalAI LocalAIConfig `yaml:"localai" usage:"LocalAI: "`
OpenAI OpenAIConfig `yaml:"openai" usage:"OpenAI: "`
AnythingLLM AnythingLLMConfig `yaml:"anythingllm" usage:"AnythingLLM: "`
Expand All @@ -27,48 +33,43 @@ type LLMConfig struct {
CallOptions CallOptionsConfig `yaml:"call" usage:"LLM-CALL: "`
}

func (c *LLMConfig) GetUsage(field string) string {
switch field {
case "Backend":
return fmt.Sprintf("The backend to use ('%s', '%s', '%s', '%s', '%s', '%s', '%s')", BackendCopilot, BackendOpenAI, BackendLocalAI, BackendAnythingLLM, BackendOllama, BackendMistral, BackendAnthropic)
}
return ""
}

func (c *LLMConfig) Validate() error {
func (c *LLMConfig) getBackend() llmConfig {
switch c.Backend {
case BackendCopilot:
if !llms.IsCopilotInstalled() {
return fmt.Errorf("GitHub Copilot is not installed")
}
return &c.Copilot
case BackendLocalAI:
if ve := c.LocalAI.Validate(); ve != nil {
return ve
}
return &c.LocalAI
case BackendOpenAI:
if ve := c.OpenAI.Validate(); ve != nil {
return ve
}
return &c.OpenAI
case BackendAnythingLLM:
if ve := c.AnythingLLM.Validate(); ve != nil {
return ve
}
return &c.AnythingLLM
case BackendOllama:
if ve := c.Ollama.Validate(); ve != nil {
return ve
}
return &c.Ollama
case BackendMistral:
if ve := c.Mistral.Validate(); ve != nil {
return ve
}
return &c.Mistral
case BackendAnthropic:
if ve := c.Anthropic.Validate(); ve != nil {
return ve
}
return &c.Anthropic
default:
return fmt.Errorf("Invalid backend")
return nil
}
}

// GetUsage returns the usage text for the given config field name,
// or the empty string for fields without a custom description.
func (c *LLMConfig) GetUsage(field string) string {
	if field != "Backend" {
		return ""
	}
	return fmt.Sprintf("The backend to use ('%s', '%s', '%s', '%s', '%s', '%s', '%s')", BackendCopilot, BackendOpenAI, BackendLocalAI, BackendAnythingLLM, BackendOllama, BackendMistral, BackendAnthropic)
}

func (c *LLMConfig) Validate() error {
b := c.getBackend()
if b == nil {
return fmt.Errorf("Invalid backend %s", c.Backend)
}
if ve := b.Validate(); ve != nil {
return ve
}
if ve := c.CallOptions.Validate(); ve != nil {
return ve
}
Expand All @@ -77,36 +78,9 @@ func (c *LLMConfig) Validate() error {
}

func (c *LLMConfig) BuildLLM() (llms.Model, error) {
switch c.Backend {
case BackendCopilot:
return llms.NewCopilot()
case BackendLocalAI:
return llms.NewLocalAI(
c.LocalAI.AsOptions(),
)
case BackendOpenAI:
return llms.NewOpenAI(
c.OpenAI.AsOptions(),
)
case BackendAnythingLLM:
return llms.NewAnythingLLM(
c.AnythingLLM.BaseURL,
c.AnythingLLM.Token,
c.AnythingLLM.Workspace,
)
case BackendOllama:
return llms.NewOllama(
c.Ollama.AsOptions(),
)
case BackendMistral:
return llms.NewMistral(
c.Mistral.AsOptions(),
)
case BackendAnthropic:
return llms.NewAnthropic(
c.Anthropic.AsOptions(),
)
default:
b := c.getBackend()
if b == nil {
return nil, fmt.Errorf("unknown backend: %s", c.Backend)
}
return b.BuildLLM()
}
5 changes: 5 additions & 0 deletions config/llm/localai.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
"github.com/tmc/langchaingo/llms/openai"
)

Expand Down Expand Up @@ -40,3 +41,7 @@ func (c *LocalAIConfig) Validate() error {

return nil
}

// BuildLLM constructs a LocalAI-backed model from this configuration.
func (c *LocalAIConfig) BuildLLM() (llms.Model, error) {
	opts := c.AsOptions()
	return llms.NewLocalAI(opts)
}
5 changes: 5 additions & 0 deletions config/llm/mistral.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
"github.com/tmc/langchaingo/llms/mistral"
)

Expand Down Expand Up @@ -32,3 +33,7 @@ func (c *MistralConfig) Validate() error {

return nil
}

// BuildLLM constructs a Mistral-backed model from this configuration.
func (c *MistralConfig) BuildLLM() (llms.Model, error) {
	opts := c.AsOptions()
	return llms.NewMistral(opts)
}
5 changes: 5 additions & 0 deletions config/llm/ollama.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
"github.com/tmc/langchaingo/llms/ollama"
)

Expand Down Expand Up @@ -31,3 +32,7 @@ func (c *OllamaConfig) Validate() error {

return nil
}

// BuildLLM constructs an Ollama-backed model from this configuration.
func (c *OllamaConfig) BuildLLM() (llms.Model, error) {
	opts := c.AsOptions()
	return llms.NewOllama(opts)
}
5 changes: 5 additions & 0 deletions config/llm/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package llm

import (
"fmt"
"github.com/rainu/ask-mai/llms"
"github.com/tmc/langchaingo/llms/openai"
)

Expand Down Expand Up @@ -56,3 +57,7 @@ func (c *OpenAIConfig) Validate() error {

return nil
}

// BuildLLM constructs an OpenAI-backed model from this configuration.
func (c *OpenAIConfig) BuildLLM() (llms.Model, error) {
	opts := c.AsOptions()
	return llms.NewOpenAI(opts)
}
17 changes: 17 additions & 0 deletions frontend/wailsjs/go/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -486,6 +486,7 @@ export namespace llm {
BaseURL: string;
Token: string;
Workspace: string;
DeleteThread: boolean;

static createFrom(source: any = {}) {
return new AnythingLLMConfig(source);
Expand All @@ -496,6 +497,7 @@ export namespace llm {
this.BaseURL = source["BaseURL"];
this.Token = source["Token"];
this.Workspace = source["Workspace"];
this.DeleteThread = source["DeleteThread"];
}
}
export class CallOptionsConfig {
Expand Down Expand Up @@ -590,8 +592,22 @@ export namespace llm {
this.BaseUrl = source["BaseUrl"];
}
}
// CopilotConfig mirrors the Go llm.CopilotConfig struct (currently empty).
// NOTE(review): this file appears to be generated by Wails (wailsjs) — presumably
// hand edits are overwritten on the next bindings generation; confirm before editing.
export class CopilotConfig {


	static createFrom(source: any = {}) {
		return new CopilotConfig(source);
	}

	constructor(source: any = {}) {
		// Accepts either a JSON string or a plain object.
		if ('string' === typeof source) source = JSON.parse(source);

	}
}
export class LLMConfig {
Backend: string;
// Go type: CopilotConfig
Copilot: any;
LocalAI: LocalAIConfig;
OpenAI: OpenAIConfig;
AnythingLLM: AnythingLLMConfig;
Expand All @@ -607,6 +623,7 @@ export namespace llm {
constructor(source: any = {}) {
if ('string' === typeof source) source = JSON.parse(source);
this.Backend = source["Backend"];
this.Copilot = this.convertValues(source["Copilot"], null);
this.LocalAI = this.convertValues(source["LocalAI"], LocalAIConfig);
this.OpenAI = this.convertValues(source["OpenAI"], OpenAIConfig);
this.AnythingLLM = this.convertValues(source["AnythingLLM"], AnythingLLMConfig);
Expand Down
18 changes: 13 additions & 5 deletions llms/anythingllm.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,12 @@ import (
type AnythingLLM struct {
client *http.Client

token string
baseURL string
workspace string
threadSlug string
token string
baseURL string
workspace string

threadSlug string
threadDelete bool
}

type chatRequest struct {
Expand Down Expand Up @@ -57,13 +59,15 @@ type threadResponse struct {
Message *string `json:"message"`
}

func NewAnythingLLM(baseURL, token, workspace string) (Model, error) {
func NewAnythingLLM(baseURL, token, workspace string, deleteThread bool) (Model, error) {
result := &AnythingLLM{
client: &http.Client{},

token: token,
baseURL: baseURL,
workspace: workspace,

threadDelete: deleteThread,
}

return result, nil
Expand Down Expand Up @@ -232,6 +236,10 @@ func (a *AnythingLLM) deleteThread(ctx context.Context) error {
}

func (a *AnythingLLM) Close() error {
if !a.threadDelete {
return nil
}

ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()

Expand Down

0 comments on commit 67ed5e0

Please sign in to comment.