fix(ai-proxy): plugin config should own the tuning parameters
tysoekong committed Jun 3, 2024
1 parent 08790c3 commit 3cd4e1a
Showing 3 changed files with 23 additions and 6 deletions.
8 changes: 4 additions & 4 deletions kong/llm/drivers/shared.lua
@@ -131,10 +131,10 @@ _M.clear_response_headers = {
 -- @return {string} error if any is thrown - request should definitely be terminated if this is not nil
 function _M.merge_config_defaults(request, options, request_format)
   if options then
-    request.temperature = request.temperature or options.temperature
-    request.max_tokens = request.max_tokens or options.max_tokens
-    request.top_p = request.top_p or options.top_p
-    request.top_k = request.top_k or options.top_k
+    request.temperature = options.temperature or request.temperature
+    request.max_tokens = options.max_tokens or request.max_tokens
+    request.top_p = options.top_p or request.top_p
+    request.top_k = options.top_k or request.top_k
   end

   return request, nil
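The substantive change is the operand order in each `or` expression. Previously the client request's own tuning values took precedence and the plugin config only filled gaps; now the plugin config owns the tuning parameters and the request value is merely a fallback. A minimal standalone Lua sketch of the difference (sample values are hypothetical, not Kong defaults):

-- Runnable sketch of the precedence flip; values are made up for illustration.
local request = { temperature = 0.9, max_tokens = 512 }              -- from the client body
local options = { temperature = 0.2, max_tokens = 256, top_p = 1.0 } -- from the plugin config

-- Old behavior: the client request wins.
local old_temp = request.temperature or options.temperature  --> 0.9

-- New behavior: the plugin config wins; the request only fills unset options.
local new_temp  = options.temperature or request.temperature  --> 0.2
local new_top_k = options.top_k or request.top_k              --> nil (set by neither side)

print(old_temp, new_temp, new_top_k)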
4 changes: 2 additions & 2 deletions spec/03-plugins/38-ai-proxy/01-unit_spec.lua
@@ -628,8 +628,8 @@ describe(PLUGIN_NAME .. ": (unit)", function()
         local formatted, err = ai_shared.merge_config_defaults(
           SAMPLE_LLM_V1_CHAT_WITH_SOME_OPTS,
           {
-            max_tokens = 1024,
-            top_p = 1.0,
+            max_tokens = 256,
+            top_p = 0.2,
           },
           "llm/v1/chat"
         )
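The updated expected values follow directly from the precedence flip: after the merge, the plugin-supplied options should win over whatever tuning values SAMPLE_LLM_V1_CHAT_WITH_SOME_OPTS carries in its body. A hedged sketch of the assertions this implies (the spec's actual fixture contents and assertion style may differ):

-- Sketch only; formatted/err come from the call above, and the fixture is defined elsewhere in the spec.
assert.is_nil(err)
assert.equals(256, formatted.max_tokens)  -- plugin config overrides the request body
assert.equals(0.2, formatted.top_p)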
17 changes: 17 additions & 0 deletions spec/03-plugins/38-ai-proxy/02-openai_integration_spec.lua
@@ -841,6 +841,23 @@ for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then
           }, json.choices[1].message)
         end)

+        it("tries to override configured model", function()
+          local r = client:get("/openai/llm/v1/chat/good", {
+            headers = {
+              ["content-type"] = "application/json",
+              ["accept"] = "application/json",
+            },
+            body = pl_file.read("spec/fixtures/ai-proxy/openai/llm-v1-chat/requests/good_own_model.json"),
+          })
+
+          -- validate that the model override is rejected with response status 400
+          local body = assert.res_status(400, r)
+          local json = cjson.decode(body)
+
+          -- check this is in the 'kong' response format
+          assert.same(json, { error = { message = "cannot use own model - must be: gpt-3.5-turbo" } })
+        end)
+
         it("bad upstream response", function()
           local r = client:get("/openai/llm/v1/chat/bad_upstream_response", {
             headers = {
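The fixture good_own_model.json is not shown in the diff; from the test name and the expected error message, it evidently sets a model other than the plugin-configured gpt-3.5-turbo. A plausible minimal shape, shown as a Lua table for consistency with the other examples (purely illustrative; the real fixture may differ):

-- Hypothetical content of good_own_model.json, expressed as a Lua table;
-- "gpt-4" stands in for any model name other than the configured one.
local good_own_model = {
  model = "gpt-4",
  messages = {
    { role = "user", content = "What is 1 + 1?" },
  },
}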
