Showing 5 changed files with 88 additions and 9 deletions.
@@ -0,0 +1,49 @@
package config

import (
    "flag"
    "fmt"
    "github.com/tmc/langchaingo/llms/openai"
)

type LocalAIConfig struct {
    APIKey  string
    Model   string
    BaseUrl string
}

func configureLocalai(c *LocalAIConfig) {
    flag.StringVar(&c.APIKey, "localai-api-key", "", "LocalAI API Key")
    flag.StringVar(&c.Model, "localai-model", "", "LocalAI chat model")
    flag.StringVar(&c.BaseUrl, "localai-base-url", "", "LocalAI API Base-URL")
}

func (c *LocalAIConfig) AsOptions() (opts []openai.Option) {
    if c.APIKey != "" {
        opts = append(opts, openai.WithToken(c.APIKey))
    } else {
        // The underlying OpenAI implementation requires an API key,
        // so we pass a placeholder here. LocalAI doesn't need an API
        // key and doesn't care about its value.
        opts = append(opts, openai.WithToken("PLACEHOLDER"))
    }
    if c.Model != "" {
        opts = append(opts, openai.WithModel(c.Model))
    }
    if c.BaseUrl != "" {
        opts = append(opts, openai.WithBaseURL(c.BaseUrl))
    }

    return
}

func (c *LocalAIConfig) Validate() error {
    if c.BaseUrl == "" {
        return fmt.Errorf("LocalAI Base URL is missing")
    }
    if c.Model == "" {
        return fmt.Errorf("LocalAI Model is missing")
    }

    return nil
}
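
For context, a minimal usage sketch of how this config is intended to be wired up: register the flags, parse, validate, then hand the resulting options to the LLM constructor. The exampleUsage helper below is hypothetical and not part of this commit; it assumes it lives in the same config package so it can call the unexported configureLocalai.

func exampleUsage() ([]openai.Option, error) {
    var cfg LocalAIConfig
    configureLocalai(&cfg) // registers the -localai-* flags
    flag.Parse()           // normally done once in main()
    if err := cfg.Validate(); err != nil {
        return nil, err
    }
    return cfg.AsOptions(), nil // pass these to the LLM constructor
}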
@@ -0,0 +1,18 @@
package llms

import (
    "fmt"
    "github.com/tmc/langchaingo/llms/openai"
)

func NewLocalAI(opts []openai.Option) (Model, error) {
    // LocalAI aims to provide the same REST interface as OpenAI,
    // which is why we reuse the OpenAI implementation here.

    result, err := NewOpenAI(opts)
    if err != nil {
        return nil, fmt.Errorf("error creating LocalAI LLM: %w", err)
    }

    return result, nil
}
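
Since LocalAI speaks the OpenAI wire protocol, a caller only needs to point the client at the LocalAI endpoint. A hedged sketch of constructing the model directly; the model name and base URL below are illustrative assumptions, and in practice the options come from LocalAIConfig.AsOptions().

func newLocalAIExample() (Model, error) {
    opts := []openai.Option{
        openai.WithToken("PLACEHOLDER"),                // LocalAI ignores the API key
        openai.WithModel("ggml-gpt4all-j"),             // assumed: whichever model the LocalAI server exposes
        openai.WithBaseURL("http://localhost:8080/v1"), // assumed default LocalAI address
    }
    return NewLocalAI(opts)
}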