diff --git a/chipper/intent-data/en-US.json b/chipper/intent-data/en-US.json
index b1ee737a..8d3225df 100755
--- a/chipper/intent-data/en-US.json
+++ b/chipper/intent-data/en-US.json
@@ -141,7 +141,7 @@
 	},
 	{
 		"name": "intent_greeting_hello",
-		"keyphrases" : ["hello", "our you", "high", "below", "little", "follow", "for you", "far you", "how about you", "how are you", "the low", "the loo" ]
+		"keyphrases" : ["hello", "our you", "high", "below", "little", "follow", "for you", "far you", "how about you", "how are you", "the low", "the loo", "our are you" ]
 	},
 	{
 		"name": "intent_imperative_come",
diff --git a/chipper/pkg/wirepod/config-ws/webserver.go b/chipper/pkg/wirepod/config-ws/webserver.go
index 7bd478fe..22c4156d 100755
--- a/chipper/pkg/wirepod/config-ws/webserver.go
+++ b/chipper/pkg/wirepod/config-ws/webserver.go
@@ -157,7 +157,7 @@ func apiHandler(w http.ResponseWriter, r *http.Request) {
 			vars.APIConfig.Weather.Enable = false
 		} else {
 			vars.APIConfig.Weather.Enable = true
-			vars.APIConfig.Weather.Key = weatherAPIKey
+			vars.APIConfig.Weather.Key = strings.TrimSpace(weatherAPIKey)
 			vars.APIConfig.Weather.Provider = weatherProvider
 		}
 		vars.WriteConfigToDisk()
@@ -192,9 +192,9 @@ func apiHandler(w http.ResponseWriter, r *http.Request) {
 		} else {
 			vars.APIConfig.Knowledge.Enable = true
 			vars.APIConfig.Knowledge.Provider = kgProvider
-			vars.APIConfig.Knowledge.Key = kgAPIKey
-			vars.APIConfig.Knowledge.Model = kgModel
-			vars.APIConfig.Knowledge.ID = kgAPIID
+			vars.APIConfig.Knowledge.Key = strings.TrimSpace(kgAPIKey)
+			vars.APIConfig.Knowledge.Model = strings.TrimSpace(kgModel)
+			vars.APIConfig.Knowledge.ID = strings.TrimSpace(kgAPIID)
 		}
 		if kgModel == "" && kgProvider == "together" {
 			logger.Println("Together model wasn't provided, using default meta-llama/Llama-2-70b-chat-hf")
@@ -217,7 +217,7 @@ func apiHandler(w http.ResponseWriter, r *http.Request) {
 			if r.FormValue("robot_name") == "" {
 				vars.APIConfig.Knowledge.RobotName = "Vector"
 			} else {
-				vars.APIConfig.Knowledge.RobotName = r.FormValue("robot_name")
+				vars.APIConfig.Knowledge.RobotName = strings.TrimSpace(r.FormValue("robot_name"))
 			}
 		} else if (kgProvider == "openai" || kgProvider == "together") && kgIntent == "false" {
 			vars.APIConfig.Knowledge.IntentGraph = false
diff --git a/chipper/pkg/wirepod/preqs/intent.go b/chipper/pkg/wirepod/preqs/intent.go
index 677b66dd..17a898cf 100755
--- a/chipper/pkg/wirepod/preqs/intent.go
+++ b/chipper/pkg/wirepod/preqs/intent.go
@@ -42,6 +42,9 @@ func (s *Server) ProcessIntent(req *vtt.IntentRequest) (*vtt.IntentResponse, err
 		_, err := ttr.StreamingKGSim(req, req.Device, transcribedText)
 		if err != nil {
 			logger.Println("LLM error: " + err.Error())
+			logger.LogUI("LLM error: " + err.Error())
+			ttr.IntentPass(req, "intent_system_unmatched", transcribedText, map[string]string{"": ""}, false)
+			ttr.KGSim(req.Device, "There was an error getting a response from the L L M. Check the logs in the web interface.")
 		}
 		logger.Println("Bot " + speechReq.Device + " request served.")
 		return nil, nil
diff --git a/chipper/pkg/wirepod/preqs/intent_graph.go b/chipper/pkg/wirepod/preqs/intent_graph.go
index aa2eb91c..4f5269b3 100755
--- a/chipper/pkg/wirepod/preqs/intent_graph.go
+++ b/chipper/pkg/wirepod/preqs/intent_graph.go
@@ -59,6 +59,9 @@ func (s *Server) ProcessIntentGraph(req *vtt.IntentGraphRequest) (*vtt.IntentGra
 		_, err := ttr.StreamingKGSim(req, req.Device, transcribedText)
 		if err != nil {
 			logger.Println("LLM error: " + err.Error())
+			logger.LogUI("LLM error: " + err.Error())
+			ttr.IntentPass(req, "intent_system_unmatched", transcribedText, map[string]string{"": ""}, false)
+			ttr.KGSim(req.Device, "There was an error getting a response from the L L M. Check the logs in the web interface.")
 		}
 		logger.Println("Bot " + speechReq.Device + " request served.")
 		return nil, nil
diff --git a/chipper/pkg/wirepod/ttr/kgsim.go b/chipper/pkg/wirepod/ttr/kgsim.go
index 31cd593f..9ee8ad11 100644
--- a/chipper/pkg/wirepod/ttr/kgsim.go
+++ b/chipper/pkg/wirepod/ttr/kgsim.go
@@ -73,6 +73,10 @@ func StreamingKGSim(req interface{}, esn string, transcribedText string) (string
 	var isDone bool
 	var c *openai.Client
 	if vars.APIConfig.Knowledge.Provider == "together" {
+		if vars.APIConfig.Knowledge.Model == "" {
+			vars.APIConfig.Knowledge.Model = "meta-llama/Llama-2-70b-chat-hf"
+			vars.WriteConfigToDisk()
+		}
 		conf := openai.DefaultConfig(vars.APIConfig.Knowledge.Key)
 		conf.BaseURL = "https://api.together.xyz/v1"
 		c = openai.NewClientWithConfig(conf)
@@ -128,8 +132,10 @@ func StreamingKGSim(req interface{}, esn string, transcribedText string) (string
 	if err != nil {
 		if strings.Contains(err.Error(), "does not exist") && vars.APIConfig.Knowledge.Provider == "openai" {
 			logger.Println("GPT-4 model cannot be accessed with this API key. You likely need to add more than $5 dollars of funds to your OpenAI account.")
+			logger.LogUI("GPT-4 model cannot be accessed with this API key. You likely need to add more than $5 dollars of funds to your OpenAI account.")
 			aireq.Model = openai.GPT3Dot5Turbo
 			logger.Println("Falling back to " + aireq.Model)
+			logger.LogUI("Falling back to " + aireq.Model)
 			stream, err = c.CreateChatCompletionStream(ctx, aireq)
 			if err != nil {
 				logger.Println("OpenAI still not returning a response even after falling back. Erroring.")
diff --git a/chipper/webroot/index.html b/chipper/webroot/index.html
index f1c0531b..65deef8a 100755
--- a/chipper/webroot/index.html
+++ b/chipper/webroot/index.html
@@ -100,7 +100,11 @@