This repository was archived by the owner on Apr 15, 2025. It is now read-only.

Commit 914f8ea
remove 3.5
1 parent d9775d2 commit 914f8ea

6 files changed (+10 / -10 lines)

.github/workflows/build.yml

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@ jobs:
       env:
         GITHUB_TOKEN: ${{ secrets.GHCR_PAT }}
       with:
-        tag_name: v2.1.1
+        tag_name: v2.1.2
         files: |
           duck2api-linux-amd64.tar.gz
           duck2api-windows-amd64.tar.gz

conversion/requests/duckgo/convert.go

Lines changed: 1 addition & 2 deletions
@@ -11,12 +11,11 @@ func ConvertAPIRequest(api_request officialtypes.APIRequest) duckgotypes.ApiRequ
     duckgo_request := duckgotypes.NewApiRequest(inputModel)
     realModel := inputModel

-    // Model mapping: simplify the model name supplied by the user, e.g. gpt-3.5 --> gpt-3.5-turbo-0125
     // If the model has no mapping, use the input model directly, so users can later use new models added by duckduckgo.
     modelLower := strings.ToLower(inputModel)
     switch {
     case strings.HasPrefix(modelLower, "gpt-3.5"):
-        realModel = "gpt-3.5-turbo-0125"
+        realModel = "gpt-4o-mini"
     case strings.HasPrefix(modelLower, "claude-3-haiku"):
         realModel = "claude-3-haiku-20240307"
     case strings.HasPrefix(modelLower, "llama-3-70b"):

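After this change, any model name that starts with gpt-3.5 (matched case-insensitively) is routed to gpt-4o-mini instead of the retired gpt-3.5-turbo-0125, while unmapped names still pass through unchanged. A minimal standalone sketch of the resulting behaviour; the mapModel helper, the main wrapper, and the example model names are illustrative and not part of the repository:

package main

import (
	"fmt"
	"strings"
)

// mapModel mirrors the switch in ConvertAPIRequest after this commit:
// known prefixes are rewritten, anything else passes through untouched.
func mapModel(inputModel string) string {
	realModel := inputModel
	modelLower := strings.ToLower(inputModel)
	switch {
	case strings.HasPrefix(modelLower, "gpt-3.5"):
		realModel = "gpt-4o-mini" // gpt-3.5 aliases now resolve to gpt-4o-mini
	case strings.HasPrefix(modelLower, "claude-3-haiku"):
		realModel = "claude-3-haiku-20240307"
	}
	return realModel
}

func main() {
	fmt.Println(mapModel("GPT-3.5-Turbo-0125")) // gpt-4o-mini
	fmt.Println(mapModel("some-new-model"))     // some-new-model (unmapped, passed through)
}
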
initialize/handlers.go

Lines changed: 1 addition & 0 deletions
@@ -95,6 +95,7 @@ func (h *Handler) engines(c *gin.Context) {

     // Supported models
     modelIDs := []string{
+        "gpt-4o-mini",
         "gpt-3.5-turbo-0125",
         "claude-3-haiku-20240307",
         "meta-llama/Llama-3-70b-chat-hf",

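gpt-4o-mini is added at the front of the modelIDs slice, and gpt-3.5-turbo-0125 stays listed so existing clients that still request it are not broken. As a hedged sketch of how such a slice is typically surfaced to clients, the snippet below builds an OpenAI-style /v1/models list with gin; the handler name, route, and owned_by value are assumptions, not taken from duck2api's actual engines handler:

package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// listModels shows one way a model-ID slice like the one above can be exposed
// in the OpenAI-compatible "/v1/models" list format.
func listModels(c *gin.Context) {
	modelIDs := []string{
		"gpt-4o-mini",
		"gpt-3.5-turbo-0125",
		"claude-3-haiku-20240307",
	}
	data := make([]gin.H, 0, len(modelIDs))
	for _, id := range modelIDs {
		data = append(data, gin.H{"id": id, "object": "model", "owned_by": "duck2api"})
	}
	c.JSON(http.StatusOK, gin.H{"object": "list", "data": data})
}

func main() {
	r := gin.Default()
	r.GET("/v1/models", listModels)
	_ = r.Run(":8080")
}
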
typings/official/response.go

Lines changed: 3 additions & 3 deletions
@@ -31,7 +31,7 @@ func NewChatCompletionChunk(text string) ChatCompletionChunk {
     ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
     Object:  "chat.completion.chunk",
     Created: 0,
-    Model:   "gpt-3.5-turbo-0301",
+    Model:   "gpt-4o-mini",
     Choices: []Choices{
         {
             Index: 0,
@@ -82,7 +82,7 @@ func StopChunk(reason string) ChatCompletionChunk {
     ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
     Object:  "chat.completion.chunk",
     Created: 0,
-    Model:   "gpt-3.5-turbo-0125",
+    Model:   "gpt-4o-mini",
     Choices: []Choices{
         {
             Index: 0,
@@ -143,7 +143,7 @@ func NewChatCompletion(full_test string, input_tokens, output_tokens int) ChatCo
     ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
     Object:  "chat.completion",
     Created: int64(0),
-    Model:   "gpt-3.5-turbo-0125",
+    Model:   "gpt-4o-mini",
     Usage: usage{
         PromptTokens:     input_tokens,
         CompletionTokens: output_tokens,

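All three constructors in response.go now stamp gpt-4o-mini into the Model field of the OpenAI-compatible payloads they build. The sketch below marshals a stand-in chunk struct to show roughly what streaming clients receive; the stand-in types and JSON tags follow the public chat.completion.chunk wire format and are not copied from the repository's typings:

package main

import (
	"encoding/json"
	"fmt"
)

// Minimal stand-in types for illustration only; the real definitions live in
// typings/official/response.go and may differ in detail.
type delta struct {
	Content string `json:"content,omitempty"`
}

type choice struct {
	Index int   `json:"index"`
	Delta delta `json:"delta"`
}

type chunk struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int64    `json:"created"`
	Model   string   `json:"model"`
	Choices []choice `json:"choices"`
}

func main() {
	c := chunk{
		ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
		Object:  "chat.completion.chunk",
		Created: 0,
		Model:   "gpt-4o-mini", // the default model name clients now see
		Choices: []choice{{Index: 0, Delta: delta{Content: "Hello"}}},
	}
	b, _ := json.Marshal(c)
	fmt.Println(string(b))
}
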
util/util.go

Lines changed: 3 additions & 2 deletions
@@ -1,10 +1,11 @@
 package util

 import (
-    "github.com/pkoukk/tiktoken-go"
     "log/slog"
     "math/rand"
     "time"
+
+    "github.com/pkoukk/tiktoken-go"
 )

 func RandomLanguage() string {
@@ -28,7 +29,7 @@ func RandomHexadecimalString() string {
     return string(b)
 }
 func CountToken(input string) int {
-    encoding := "gpt-3.5-turbo"
+    encoding := "gpt-4o-mini"
     tkm, err := tiktoken.EncodingForModel(encoding)
     if err != nil {
         slog.Warn("tiktoken.EncodingForModel error:", err)

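CountToken now asks tiktoken-go for the gpt-4o-mini encoding instead of gpt-3.5-turbo. Whether EncodingForModel recognises that name depends on the installed tiktoken-go release, and the existing code only logs a warning when it does not. A defensive variant is sketched below; the function name, the o200k_base fallback (the encoding used by the gpt-4o family), and the assumption that the installed release ships that encoding are all illustrative:

package main

import (
	"fmt"
	"log/slog"

	"github.com/pkoukk/tiktoken-go"
)

// countTokenSafe mirrors util.CountToken but falls back to a named encoding
// when the installed tiktoken-go release does not know the model name.
func countTokenSafe(input string) int {
	tkm, err := tiktoken.EncodingForModel("gpt-4o-mini")
	if err != nil {
		slog.Warn("EncodingForModel failed, falling back to o200k_base", "err", err)
		tkm, err = tiktoken.GetEncoding("o200k_base")
		if err != nil {
			return 0
		}
	}
	return len(tkm.Encode(input, nil, nil))
}

func main() {
	fmt.Println(countTokenSafe("hello, duck2api"))
}
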
web/index.html

Lines changed: 1 addition & 2 deletions
@@ -2476,7 +2476,6 @@
     <div class="presetSelect presetModelCls">
         <label for="preSetModel" data-i18n-key="gptModel"></label>
         <select id="preSetModel">
-            <option value="gpt-3.5-turbo-0125">gpt-3.5-turbo-0125</option>
             <option value="gpt-4o-mini">gpt-4o-mini</option>
             <option value="claude-3-haiku-20240307">claude-3-haiku-20240307</option>
             <option value="llama-3-70b">llama-3-70b</option>
@@ -5716,7 +5715,7 @@
     const initSetting = () => {
         const modelEle = document.getElementById("preSetModel");
         let localModel = localStorage.getItem("modelVersion");
-        modelVersion = modelEle.value = localModel || "gpt-3.5-turbo";
+        modelVersion = modelEle.value = localModel || "gpt-4o-mini";
         modelEle.onchange = () => {
             modelVersion = modelEle.value;
             localStorage.setItem("modelVersion", modelVersion);

0 commit comments
