Skip to content

Commit

Permalink
v0.3.0 (#221)
Browse files Browse the repository at this point in the history
* Feat add tools (#201)

* fix: openai WithTools pointer receiver (#204)

* fix: use pointer receiver

* chore: add assistant execute

* chore: change the way assistant actually works

* chore: rename tools in tool

* docs: update docs

* Refactor assistant observer (#206)

* refactor assistant observer

* fix: linting

* fix: assistant observer

* fix: linting

* fix: serpapi response parsing

* fix: tools

* fix: use json as default output and parse embeddings

* fix

* fix ollama embed error

* fix lint
  • Loading branch information
henomis authored Nov 2, 2024
1 parent 3930e2b commit a428b30
Show file tree
Hide file tree
Showing 43 changed files with 1,502 additions and 61 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,5 @@ bin/
llama.cpp/
whisper.cpp/

*/.hugo_build.lock
*/.hugo_build.lock
docs/public/
75 changes: 69 additions & 6 deletions assistant/assistant.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package assistant

import (
"context"
"fmt"
"strings"

obs "github.com/henomis/lingoose/observer"
Expand All @@ -22,11 +23,16 @@ type observer interface {
SpanEnd(s *obs.Span) (*obs.Span, error)
}

const (
	// DefaultMaxIterations is the default cap on the generate/tool-call
	// iterations performed by Assistant.Run before it stops.
	DefaultMaxIterations = 3
)

// Assistant wires together an LLM, an optional RAG component, and the
// conversation thread it operates on. Run drives up to maxIterations
// generate/tool cycles over the thread.
//
// NOTE(review): the scraped diff interleaved the old and new field lists;
// this is the post-change struct (old fields plus maxIterations).
type Assistant struct {
	llm           LLM
	rag           RAG
	thread        *thread.Thread
	parameters    Parameters
	maxIterations uint
}

type LLM interface {
Expand All @@ -48,6 +54,7 @@ func New(llm LLM) *Assistant {
CompanyName: defaultCompanyName,
CompanyDescription: defaultCompanyDescription,
},
maxIterations: DefaultMaxIterations,
}

return assistant
Expand Down Expand Up @@ -83,14 +90,41 @@ func (a *Assistant) Run(ctx context.Context) error {
if errGenerate != nil {
return errGenerate
}
} else {
a.injectSystemMessage()
}

for i := 0; i < int(a.maxIterations); i++ {
err = a.runIteration(ctx, i)
if err != nil {
return err
}

if a.thread.LastMessage().Role != thread.RoleTool {
break
}
}

err = a.stopObserveSpan(ctx, spanAssistant)
if err != nil {
return err
}

return nil
}

func (a *Assistant) runIteration(ctx context.Context, iteration int) error {
ctx, spanIteration, err := a.startObserveSpan(ctx, fmt.Sprintf("iteration-%d", iteration+1))
if err != nil {
return err
}

err = a.llm.Generate(ctx, a.thread)
if err != nil {
return err
}

err = a.stopObserveSpan(ctx, spanAssistant)
err = a.stopObserveSpan(ctx, spanIteration)
if err != nil {
return err
}
Expand Down Expand Up @@ -123,7 +157,7 @@ func (a *Assistant) generateRAGMessage(ctx context.Context) error {

a.thread.AddMessage(thread.NewSystemMessage().AddContent(
thread.NewTextContent(
systemRAGPrompt,
systemPrompt,
).Format(
types.M{
"assistantName": a.parameters.AssistantName,
Expand All @@ -147,6 +181,11 @@ func (a *Assistant) generateRAGMessage(ctx context.Context) error {
return nil
}

// WithMaxIterations sets the maximum number of Run iterations and returns
// the assistant so calls can be chained.
func (a *Assistant) WithMaxIterations(maxIterations uint) *Assistant {
	a.maxIterations = maxIterations
	return a
}

func (a *Assistant) startObserveSpan(ctx context.Context, name string) (context.Context, *obs.Span, error) {
o, ok := obs.ContextValueObserverInstance(ctx).(observer)
if o == nil || !ok {
Expand Down Expand Up @@ -183,3 +222,27 @@ func (a *Assistant) stopObserveSpan(ctx context.Context, span *obs.Span) error {
_, err := o.SpanEnd(span)
return err
}

// injectSystemMessage prepends a system message rendered from the assistant's
// parameters, unless the thread already contains a system message.
func (a *Assistant) injectSystemMessage() {
	for i := range a.thread.Messages {
		if a.thread.Messages[i].Role == thread.RoleSystem {
			// A system message is already present; leave the thread untouched.
			return
		}
	}

	content := thread.NewTextContent(systemPrompt).Format(
		types.M{
			"assistantName":      a.parameters.AssistantName,
			"assistantIdentity":  a.parameters.AssistantIdentity,
			"assistantScope":     a.parameters.AssistantScope,
			"companyName":        a.parameters.CompanyName,
			"companyDescription": a.parameters.CompanyDescription,
		},
	)

	systemMessage := thread.NewSystemMessage().AddContent(content)

	// Put the system message first, keeping the existing messages in order.
	a.thread.Messages = append([]*thread.Message{systemMessage}, a.thread.Messages...)
}
2 changes: 1 addition & 1 deletion assistant/prompt.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ const (
//nolint:lll
baseRAGPrompt = "Use the following pieces of retrieved context to answer the question.\n\nQuestion: {{.question}}\nContext:\n{{range .results}}{{.}}\n\n{{end}}"
//nolint:lll
systemRAGPrompt = "You name is {{.assistantName}}, and you are {{.assistantIdentity}} {{if ne .companyName \"\" }}at {{.companyName}}{{end}}{{if ne .companyDescription \"\" }}, {{.companyDescription}}{{end}}. Your task is to assist humans {{.assistantScope}}."
systemPrompt = "{{if ne .assistantName \"\"}}You name is {{.assistantName}}, {{end}}{{if ne .assistantIdentity \"\"}}you are {{.assistantIdentity}}.{{end}} {{if ne .companyName \"\" }}at {{.companyName}}{{end}}{{if ne .companyDescription \"\" }}, {{.companyDescription}}.{{end}} Your task is to assist humans {{.assistantScope}}."

defaultAssistantName = "AI assistant"
defaultAssistantIdentity = "a helpful and polite assistant"
Expand Down
35 changes: 34 additions & 1 deletion docs/content/reference/assistant.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,4 +31,37 @@ if err != nil {
fmt.Println(myAssistant.Thread())
```

We can define the LinGoose `Assistant` as a `Thread` runner with an optional `RAG` component that will help to produce the response.
We can define the LinGoose `Assistant` as a `Thread` runner with an optional `RAG` component that will help to produce the response.

## Assistant as Agent

The `Assistant` can be used as an agent in a conversation. It can be used to automate tasks, answer questions, and provide information.

```go
auto := "auto"
myAgent := assistant.New(
openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools(
pythontool.New(),
serpapitool.New(),
),
).WithParameters(
assistant.Parameters{
AssistantName: "AI Assistant",
AssistantIdentity: "a helpful assistant",
AssistantScope: "with their questions.",
CompanyName: "",
CompanyDescription: "",
},
).WithThread(
thread.New().AddMessages(
thread.NewUserMessage().AddContent(
thread.NewTextContent("calculate the average temperature in celsius degrees of New York, Rome, and Tokyo."),
),
),
).WithMaxIterations(10)

err := myAgent.Run(context.Background())
if err != nil {
panic(err)
}
```
2 changes: 1 addition & 1 deletion docs/content/reference/examples.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
title: "LinGoose Examples"
description:
linkTitle: "Examples"
menu: { main: { parent: 'reference', weight: -88 } }
menu: { main: { parent: 'reference', weight: -87 } }
---

LinGoose provides a number of examples to help you get started with building your own AI app. You can use these examples as a reference to understand how to build your own assistant.
Expand Down
2 changes: 1 addition & 1 deletion docs/content/reference/linglet.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
title: "LinGoose Linglets"
description:
linkTitle: "Linglets"
menu: { main: { parent: 'reference', weight: -89 } }
menu: { main: { parent: 'reference', weight: -88 } }
---

Linglets are pre-built LinGoose Assistants with a specific purpose. They are designed to be used as a starting point for building your own AI app. You can use them as a reference to understand how to build your own assistant.
Expand Down
2 changes: 1 addition & 1 deletion docs/content/reference/observer.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
---
title: "Observer"
title: "Observe and Analyze LLM Applications"
description:
linkTitle: "Observer"
menu: { main: { parent: 'reference', weight: -92 } }
Expand Down
52 changes: 52 additions & 0 deletions docs/content/reference/tool.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
---
title: "Performing tasks with Tools"
description:
linkTitle: "Tool"
menu: { main: { parent: 'reference', weight: -89 } }
---

Tools are components that can be used to perform specific tasks. They can be used to automate tasks, answer questions, and provide information. LinGoose offers a variety of tools that can be used to perform different actions.

## Available Tools

- *Python*: It can be used to run Python code and get the output.
- *SerpApi*: It can be used to get search results from Google and other search engines.
- *Dall-e*: It can be used to generate images based on text descriptions.
- *DuckDuckGo*: It can be used to get search results from DuckDuckGo.
- *RAG*: It can be used to retrieve relevant documents based on a query.
- *LLM*: It can be used to generate text based on a prompt.
- *Shell*: It can be used to run shell commands and get the output.


## Using Tools

LinGoose tools can be used to perform specific tasks. Here is an example that uses the `Python` and `SerpApi` tools to gather information, run Python code, and capture its output.

```go
auto := "auto"
myAgent := assistant.New(
openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools(
pythontool.New(),
serpapitool.New(),
),
).WithParameters(
assistant.Parameters{
AssistantName: "AI Assistant",
AssistantIdentity: "a helpful assistant",
AssistantScope: "with their questions.",
CompanyName: "",
CompanyDescription: "",
},
).WithThread(
thread.New().AddMessages(
thread.NewUserMessage().AddContent(
thread.NewTextContent("calculate the average temperature in celsius degrees of New York, Rome, and Tokyo."),
),
),
).WithMaxIterations(10)

err := myAgent.Run(context.Background())
if err != nil {
panic(err)
}
```
35 changes: 23 additions & 12 deletions embedder/llamacpp/llamacpp.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@ package llamacppembedder

import (
"context"
"encoding/json"
"errors"
"os"
"os/exec"
"strconv"
"strings"

"github.com/henomis/lingoose/embedder"
)
Expand All @@ -16,6 +16,16 @@ type LlamaCppEmbedder struct {
modelPath string
}

// output mirrors the top-level JSON document emitted by the llama.cpp
// embedding binary when invoked with --embd-output-format json.
type output struct {
	Object string `json:"object"`
	Data   []data `json:"data"`
}

// data is a single embedding entry inside the llama.cpp JSON output.
type data struct {
	Object    string    `json:"object"`
	Index     int       `json:"index"`
	Embedding []float64 `json:"embedding"`
}

func New() *LlamaCppEmbedder {
return &LlamaCppEmbedder{
llamacppPath: "./llama.cpp/embedding",
Expand Down Expand Up @@ -61,7 +71,7 @@ func (l *LlamaCppEmbedder) embed(ctx context.Context, text string) (embedder.Emb
return nil, err
}

llamacppArgs := []string{"-m", l.modelPath, "-p", text}
llamacppArgs := []string{"-m", l.modelPath, "--embd-output-format", "json", "-p", text}
llamacppArgs = append(llamacppArgs, l.llamacppArgs...)

//nolint:gosec
Expand All @@ -74,14 +84,15 @@ func (l *LlamaCppEmbedder) embed(ctx context.Context, text string) (embedder.Emb
}

func parseEmbeddings(str string) (embedder.Embedding, error) {
strSlice := strings.Split(strings.TrimSpace(str), " ")
floatSlice := make([]float64, len(strSlice))
for i, s := range strSlice {
f, err := strconv.ParseFloat(s, 64)
if err != nil {
return nil, err
}
floatSlice[i] = f
var out output
err := json.Unmarshal([]byte(str), &out)
if err != nil {
return nil, err
}
return floatSlice, nil

if len(out.Data) != 1 {
return nil, errors.New("no embeddings found")
}

return out.Data[0].Embedding, nil
}
9 changes: 8 additions & 1 deletion embedder/ollama/api.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ func (r *request) ContentType() string {
type response struct {
HTTPStatusCode int `json:"-"`
acceptContentType string `json:"-"`
RawBody []byte `json:"-"`
Embedding []float64 `json:"embedding"`
CreatedAt string `json:"created_at"`
}
Expand All @@ -46,7 +47,13 @@ func (r *response) Decode(body io.Reader) error {
return json.NewDecoder(body).Decode(r)
}

func (r *response) SetBody(_ io.Reader) error {
func (r *response) SetBody(body io.Reader) error {
rawBody, err := io.ReadAll(body)
if err != nil {
return err
}

r.RawBody = rawBody
return nil
}

Expand Down
17 changes: 17 additions & 0 deletions embedder/ollama/ollama.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@ package ollamaembedder

import (
"context"
"errors"
"fmt"
"net/http"

"github.com/henomis/restclientgo"

Expand All @@ -14,6 +17,14 @@ const (
defaultEndpoint = "http://localhost:11434/api"
)

// OllamaEmbedError wraps an error returned while embedding text with the
// Ollama API.
type OllamaEmbedError struct {
	Err error
}

// Error implements the error interface. The message is lowercase with no
// trailing punctuation, per Go error-string convention.
func (e *OllamaEmbedError) Error() string {
	return fmt.Sprintf("error embedding text: %v", e.Err)
}

// Unwrap exposes the underlying error so errors.Is and errors.As can
// inspect the chain.
func (e *OllamaEmbedError) Unwrap() error {
	return e.Err
}

type Embedder struct {
model string
restClient *restclientgo.RestClient
Expand Down Expand Up @@ -88,5 +99,11 @@ func (e *Embedder) embed(ctx context.Context, text string) (embedder.Embedding,
return nil, err
}

if resp.HTTPStatusCode >= http.StatusBadRequest {
return nil, &OllamaEmbedError{
Err: errors.New(string(resp.RawBody)),
}
}

return resp.Embedding, nil
}
Loading

0 comments on commit a428b30

Please sign in to comment.