From c63c967ff3de140c7b9f6af2bc6349f001004c08 Mon Sep 17 00:00:00 2001
From: Christoph Dreis
Date: Tue, 7 Nov 2023 14:26:33 +0100
Subject: [PATCH] Align token count for gpt-3.5-turbo with official docs

---
 command/openai/tokens.go      | 2 +-
 command/openai/tokens_test.go | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/command/openai/tokens.go b/command/openai/tokens.go
index 8abfecf3..e5b211e8 100644
--- a/command/openai/tokens.go
+++ b/command/openai/tokens.go
@@ -10,7 +10,7 @@ var maxTokens = map[string]int{
 	"gpt-4":                8192,
 	"gpt-4-32k":            32768,
 	"gpt-3.5-turbo-16k":    16385,
-	"gpt-3.5-turbo":        4097,
+	"gpt-3.5-turbo":        4096,
 	"gpt-4-1106-preview":   128000,
 	"gpt-4-vision-preview": 128000,
 	"dummy-test":           100, // just for testing
diff --git a/command/openai/tokens_test.go b/command/openai/tokens_test.go
index 8f1e46e3..70d7e566 100644
--- a/command/openai/tokens_test.go
+++ b/command/openai/tokens_test.go
@@ -16,7 +16,7 @@ func TestModels(t *testing.T) {
 		{"gpt-4", 8192},
 		{"gpt-4-0613", 8192},
 		{"gpt-4-32k-0613", 32768},
-		{"gpt-3.5-turbo", 4097},
+		{"gpt-3.5-turbo", 4096},
 		{"gpt-3.5-turbo-16k-0613", 16385},
 	}
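
Note: the patch only touches the maxTokens map and its test data; it does not show how the map is consumed. The test expectations (e.g. "gpt-4-0613" resolving to 8192, "gpt-3.5-turbo-16k-0613" to 16385) suggest that dated model names are resolved via some form of prefix matching against the map keys. The sketch below is not code from the repository: getMaxTokensForModel is a hypothetical helper illustrating one way such a lookup could behave, assuming longest-prefix matching and a fallback of 4096 for unknown models.

package main

import (
	"fmt"
	"strings"
)

// Subset of the map touched by the patch, with the corrected 4096 value.
var maxTokens = map[string]int{
	"gpt-4":             8192,
	"gpt-4-32k":         32768,
	"gpt-3.5-turbo-16k": 16385,
	"gpt-3.5-turbo":     4096,
}

// getMaxTokensForModel is a hypothetical helper (not taken from the repo).
// It resolves dated model names such as "gpt-4-0613" by picking the longest
// map key that prefixes the model name, so "gpt-3.5-turbo-16k-0613" matches
// "gpt-3.5-turbo-16k" (16385) rather than the shorter "gpt-3.5-turbo" (4096).
func getMaxTokensForModel(model string) int {
	best := ""
	for key := range maxTokens {
		if strings.HasPrefix(model, key) && len(key) > len(best) {
			best = key
		}
	}
	if best == "" {
		return 4096 // assumed fallback for unknown models
	}
	return maxTokens[best]
}

func main() {
	fmt.Println(getMaxTokensForModel("gpt-3.5-turbo"))          // 4096
	fmt.Println(getMaxTokensForModel("gpt-4-0613"))             // 8192
	fmt.Println(getMaxTokensForModel("gpt-3.5-turbo-16k-0613")) // 16385
}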