Skip to content

Commit

Permalink
fix(ai-proxy): lint
Browse files Browse the repository at this point in the history
  • Loading branch information
tysoekong committed Jul 5, 2024
1 parent 5a6daca commit 61d44d1
Show file tree
Hide file tree
Showing 4 changed files with 5 additions and 31 deletions.
2 changes: 2 additions & 0 deletions kong-3.8.0-0.rockspec
Original file line number Diff line number Diff line change
Expand Up @@ -605,6 +605,8 @@ build = {
["kong.llm.drivers.mistral"] = "kong/llm/drivers/mistral.lua",
["kong.llm.drivers.llama2"] = "kong/llm/drivers/llama2.lua",

["kong.llm.drivers.gemini"] = "kong/llm/drivers/gemini.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",

Expand Down
16 changes: 2 additions & 14 deletions kong/llm/drivers/gemini.lua
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ local string_gsub = string.gsub
local buffer = require("string.buffer")
local table_insert = table.insert
local string_lower = string.lower
local string_sub = string.sub
--

-- globals
Expand Down Expand Up @@ -49,10 +48,7 @@ local function is_response_finished(content)
and content.candidates[1].finishReason
end

local function handle_stream_event(event_t, model_info, route_type)
local metadata


local function handle_stream_event(event_t, model_info, route_type)
-- discard empty frames, it should either be a random new line, or comment
if (not event_t.data) or (#event_t.data < 1) then
return
Expand All @@ -65,7 +61,7 @@ local function handle_stream_event(event_t, model_info, route_type)
end

local new_event
local metadata
local metadata = nil

if is_response_content(event) then
new_event = {
Expand Down Expand Up @@ -219,14 +215,6 @@ local function from_gemini_chat_openai(response, model_info, route_type)
return cjson.encode(messages)
end

-- Placeholder transformer: native Gemini-to-Gemini request translation is
-- not implemented yet. Always fails, using the module's (result, ..., err)
-- return convention so callers surface the message as a transformer error.
local function to_gemini_chat_gemini(request_table, model_info, route_type)
  local err = "gemini to gemini not yet implemented"
  return nil, nil, err
end

-- Placeholder transformer: native Gemini-to-Gemini response translation is
-- not implemented yet. Mirrors to_gemini_chat_gemini: always returns the
-- not-implemented error in the third position, nil results otherwise.
local function from_gemini_chat_gemini(request_table, model_info, route_type)
  local err = "gemini to gemini not yet implemented"
  return nil, nil, err
end

local transformers_to = {
["llm/v1/chat"] = to_gemini_chat_openai,
}
Expand Down
4 changes: 1 addition & 3 deletions kong/plugins/ai-proxy/handler.lua
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ local _M = {


-- static messages
local ERROR_MSG = { error = { message = "" } }
local ERROR__NOT_SET = 'data: {"error": true, "message": "empty or unsupported transformer response"}'


Expand Down Expand Up @@ -481,8 +480,7 @@ function _M:access(conf)
if identity_interface and identity_interface.error then
kong.ctx.shared.skip_response_transformer = true
kong.log.err("error authenticating with cloud-provider, ", identity_interface.error)

return internal_server_error("LLM request failed before proxying")
return kong.response.exit(500, "LLM request failed before proxying")
end

-- now re-configure the request for this operation type
Expand Down
14 changes: 0 additions & 14 deletions spec/03-plugins/38-ai-proxy/01-unit_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -660,20 +660,6 @@ describe(PLUGIN_NAME .. ": (unit)", function()
}, formatted)
end)


-- Debug helper: render any value as a human-readable string.
-- Tables are serialised recursively as "{ [k] = v,...} "; non-table values
-- go through tostring. String keys are wrapped in double quotes, numeric
-- keys are left bare. NOTE: key order follows pairs(), which is
-- unspecified, so output for multi-key tables is not deterministic.
local function dump(o)
  if type(o) ~= 'table' then
    return tostring(o)
  end
  local out = '{ '
  for key, value in pairs(o) do
    local rendered_key = key
    if type(key) ~= 'number' then
      rendered_key = '"' .. key .. '"'
    end
    out = out .. '[' .. rendered_key .. '] = ' .. dump(value) .. ','
  end
  return out .. '} '
end

describe("streaming transformer tests", function()

it("transforms truncated-json type (beginning of stream)", function()
Expand Down

0 comments on commit 61d44d1

Please sign in to comment.