diff --git a/.gitignore b/.gitignore index 6836bab0..975b7f7f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ *.out bin/ .vscode/ +.local/ # Dependency directories (remove the comment below to include it) # vendor/ diff --git a/README.md b/README.md index 9511aaf4..26566a5e 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,15 @@ +![image](./docs/assets/img/lingoose-small.png) + # 🪿 LinGoose [![Build Status](https://github.com/henomis/lingoose/actions/workflows/test.yml/badge.svg)](https://github.com/henomis/lingoose/actions/workflows/test.yml) [![GoDoc](https://godoc.org/github.com/henomis/lingoose?status.svg)](https://godoc.org/github.com/henomis/lingoose) [![Go Report Card](https://goreportcard.com/badge/github.com/henomis/lingoose)](https://goreportcard.com/report/github.com/henomis/lingoose) [![GitHub release](https://img.shields.io/github/release/henomis/lingoose.svg)](https://github.com/henomis/lingoose/releases) **LinGoose** (_Lingo + Go + Goose_ 🪿) aims to be a complete Go framework for creating LLM apps. 🤖 ⚙️ +> **Did you know?** A goose 🪿 fills its car 🚗 with goose-line ⛽! + +Below is an image from docs/assets/img/lingoose.png + # Overview **LinGoose** is a powerful Go framework for developing Large Language Model (LLM) based applications using pipelines. It is designed to be a complete solution and provides multiple components, including Prompts, Templates, Chat, Output Decoders, LLM, Pipelines, and Memory. With **LinGoose**, you can interact with LLM AI through prompts and generate complex templates. Additionally, it includes a chat feature, allowing you to create chatbots. The Output Decoders component enables you to extract specific information from the output of the LLM, while the LLM interface allows you to send prompts to various AI, such as the ones provided by OpenAI. You can chain multiple LLM steps together using Pipelines and store the output of each step in Memory for later retrieval. 
diff --git a/docs/assets/img/lingoose-small.png b/docs/assets/img/lingoose-small.png new file mode 100644 index 00000000..b153a844 Binary files /dev/null and b/docs/assets/img/lingoose-small.png differ diff --git a/docs/assets/img/lingoose.png b/docs/assets/img/lingoose.png new file mode 100644 index 00000000..6f33fd7a Binary files /dev/null and b/docs/assets/img/lingoose.png differ diff --git a/docs/css/styles.css b/docs/css/styles.css index 11ef3dbb..9f1c1c4b 100644 --- a/docs/css/styles.css +++ b/docs/css/styles.css @@ -11047,4 +11047,10 @@ html { .big-emoji { font-size: 10rem !important; +} + +blockquote { + border-left: 0.2rem solid #1abc9c; + padding: 1rem; + } \ No newline at end of file diff --git a/docs/index.html b/docs/index.html index d3afba64..04d3a773 100644 --- a/docs/index.html +++ b/docs/index.html @@ -37,8 +37,8 @@
- -
🪿
+ ... +

LinGoose

@@ -59,6 +59,7 @@

🪿 LinGoose

Build Status GoDoc Go Report Card GitHub release

LinGoose (Lingo + Go + Goose 🪿) aims to be a complete Go framework for creating LLM apps. 🤖 ⚙️

+

Did you know? A goose 🪿 fills its car 🚗 with goose-line ⛽!

Overview

LinGoose is a powerful Go framework for developing Large Language Model (LLM) based applications using pipelines. It is designed to be a complete solution and provides multiple components, including Prompts, Templates, Chat, Output Decoders, LLM, Pipelines, and Memory. With LinGoose, you can interact with LLM AI through prompts and generate complex templates. Additionally, it includes a chat feature, allowing you to create chatbots. The Output Decoders component enables you to extract specific information from the output of the LLM, while the LLM interface allows you to send prompts to various AI, such as the ones provided by OpenAI. You can chain multiple LMM steps together using Pipelines and store the output of each step in Memory for later retrieval.

Components

@@ -111,116 +112,119 @@

Components

Usage

Please refer to the examples directory to see other examples. However, here is an example of what LinGoose is capable of:

Talk is cheap. Show me the code. - Linus Torvalds

-
package main
-                
-                import (
-                    "encoding/json"
-                    "fmt"
-                
-                    "github.com/henomis/lingoose/decoder"
-                    "github.com/henomis/lingoose/llm/openai"
-                    "github.com/henomis/lingoose/memory/ram"
-                    "github.com/henomis/lingoose/pipeline"
-                    "github.com/henomis/lingoose/prompt"
-                )
-                
-                func main() {
-                
-                    llmOpenAI, err := openai.New(openai.GPT3TextDavinci003, true)
-                    if err != nil {
-                        panic(err)
-                    }
-                    cache := ram.New()
-                
-                    prompt1 := prompt.New("Hello how are you?")
-                    pipe1 := pipeline.NewStep(
-                        "step1",
-                        llmOpenAI,
-                        prompt1,
-                        nil,
-                        decoder.NewDefaultDecoder(),
-                        cache,
-                    )
-                
-                    prompt2, _ := prompt.NewPromptTemplate(
-                        "Consider the following sentence.\n\nSentence:\n{{.output}}\n\n" +
-                        "Translate it in {{.language}}!"",
-                        map[string]string{
-                            "language": "italian",
-                        },
-                    )
-                    pipe2 := pipeline.NewStep(
-                        "step2",
-                        llmOpenAI,
-                        prompt2,
-                        nil,
-                        decoder.NewDefaultDecoder(),
-                        nil,
-                    )
-                    
-                    prompt3, _ := prompt.NewPromptTemplate(
-                        "Consider the following sentence.\n\nSentence:\n{{.step1.output}}" +
-                        "\n\nTranslate it in {{.language}}!`"",
-                        map[string]string{
-                            "language": "spanish",
-                        },
-                    )
-                    pipe3 := pipeline.NewStep(
-                        "step3",
-                        llmOpenAI,
-                        prompt3,
-                        nil,
-                        decoder.NewDefaultDecoder(),
-                        cache,
-                    )
-                    
-                    pipelineSteps := pipeline.New(
-                        pipe1,
-                        pipe2,
-                        pipe3,
-                    )
-                    
-                    response, err := pipelineSteps.Run(nil)
-                    if err != nil {
-                        fmt.Println(err)
-                    }
-                
-                    fmt.Printf("\n\nFinal output: %#v\n\n", response)
-                
-                    fmt.Println("---Memory---")
-                    dump, _ := json.MarshalIndent(cache.All(), "", "  ")
-                    fmt.Printf("%s\n", string(dump))
-                }
-                
+
+                    
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/henomis/lingoose/decoder"
+	"github.com/henomis/lingoose/llm/openai"
+	"github.com/henomis/lingoose/memory/ram"
+	"github.com/henomis/lingoose/pipeline"
+	"github.com/henomis/lingoose/prompt"
+)
+
+func main() {
+
+	llmOpenAI, err := openai.New(openai.GPT3TextDavinci003, true)
+	if err != nil {
+		panic(err)
+	}
+	cache := ram.New()
+
+	prompt1 := prompt.New("Hello how are you?")
+	pipe1 := pipeline.NewStep(
+		"step1",
+		llmOpenAI,
+		prompt1,
+		decoder.NewDefaultDecoder(),
+		cache,
+	)
+
+	prompt2, _ := prompt.NewPromptTemplate(
+		"Consider the following sentence.\n\nSentence:\n{{.output}}\n\n"+
+			"Translate it in {{.language}}!",
+		map[string]string{
+			"language": "italian",
+		},
+	)
+	pipe2 := pipeline.NewStep(
+		"step2",
+		llmOpenAI,
+		prompt2,
+		decoder.NewDefaultDecoder(),
+		nil,
+	)
+
+	prompt3, _ := prompt.NewPromptTemplate(
+		"Consider the following sentence.\n\nSentence:\n{{.step1.output}}"+
+			"\n\nTranslate it in {{.language}}!",
+		map[string]string{
+			"language": "spanish",
+		},
+	)
+	pipe3 := pipeline.NewStep(
+		"step3",
+		llmOpenAI,
+		prompt3,
+		decoder.NewDefaultDecoder(),
+		cache,
+	)
+
+	pipelineSteps := pipeline.New(
+		pipe1,
+		pipe2,
+		pipe3,
+	)
+
+	response, err := pipelineSteps.Run(nil)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	fmt.Printf("\n\nFinal output: %#v\n\n", response)
+
+	fmt.Println("---Memory---")
+	dump, _ := json.MarshalIndent(cache.All(), "", "  ")
+	fmt.Printf("%s\n", string(dump))
+}
+                    
+                

Running this example will produce the following output:

-
---USER---
-                Hello how are you?
-                ---AI---
-                I'm doing well, thank you. How about you?
-                ---USER---
-                Consider the following sentence.\n\nSentence:\nI'm doing well, thank you. How about you?\n\n
-                                Translate it in italian!
-                ---AI---
-                Sto bene, grazie. E tu come stai?
-                ---USER---
-                Consider the following sentence.\n\nSentence:\nI'm doing well, thank you. How about you?
-                                \n\nTranslate it in spanish!
-                ---AI---
-                Estoy bien, gracias. ¿Y tú
-                
-                
-                Final output: map[string]interface {}{"output":"Estoy bien, gracias. ¿Y tú"}
-                
-                ---Memory---
-                {
-                  "step1": {
-                    "output": "I'm doing well, thank you. How about you?"
-                  },
-                  "step3": {
-                    "output": "Estoy bien, gracias. ¿Y tú"
-                  }
-                }
-                
+
+                    
+---USER---
+Hello how are you?
+---AI---
+I'm doing well, thank you. How about you?
+---USER---
+Consider the following sentence.\n\nSentence:\nI'm doing well, thank you. How about you?\n\n
+                Translate it in italian!
+---AI---
+Sto bene, grazie. E tu come stai?
+---USER---
+Consider the following sentence.\n\nSentence:\nI'm doing well, thank you. How about you?
+                \n\nTranslate it in spanish!
+---AI---
+Estoy bien, gracias. ¿Y tú
+
+
+Final output: map[string]interface {}{"output":"Estoy bien, gracias. ¿Y tú"}
+
+---Memory---
+{
+    "step1": {
+    "output": "I'm doing well, thank you. How about you?"
+    },
+    "step3": {
+    "output": "Estoy bien, gracias. ¿Y tú"
+    }
+}
+                    
+                

Installation

Be sure to have a working Go environment, then run the following command:

go get github.com/henomis/lingoose
diff --git a/examples/chat/main.go b/examples/chat/main.go
index 9e625f11..052f6c85 100644
--- a/examples/chat/main.go
+++ b/examples/chat/main.go
@@ -17,7 +17,7 @@ func main() {
 		},
 		chat.PromptMessage{
 			Type:   chat.MessageTypeUser,
-			Prompt: prompt.New("Write a joke about a cat"),
+			Prompt: prompt.New("Write a joke about a goose"),
 		},
 	)
 
diff --git a/examples/pipeline/chat/main.go b/examples/pipeline/chat/main.go
index c34d52a6..9ed241ed 100644
--- a/examples/pipeline/chat/main.go
+++ b/examples/pipeline/chat/main.go
@@ -87,7 +87,7 @@ func main() {
 
 	values := map[string]string{
 		"role":   "joke writer",
-		"animal": "cat",
+		"animal": "goose",
 	}
 	response, err := pipe.Run(values)
 	if err != nil {