diff --git a/changelog/unreleased/kong/fix-ai-proxy-model-name-slash.yml b/changelog/unreleased/kong/fix-ai-proxy-model-name-slash.yml
new file mode 100644
index 00000000000..545373f740f
--- /dev/null
+++ b/changelog/unreleased/kong/fix-ai-proxy-model-name-slash.yml
@@ -0,0 +1,3 @@
+message: "**ai-proxy**: Fixed a panic triggered by a non-JSON response."
+type: "bugfix"
+scope: "Plugin"
diff --git a/kong/llm/plugin/shared-filters/serialize-analytics.lua b/kong/llm/plugin/shared-filters/serialize-analytics.lua
index 390d528450f..8a71aa9d732 100644
--- a/kong/llm/plugin/shared-filters/serialize-analytics.lua
+++ b/kong/llm/plugin/shared-filters/serialize-analytics.lua
@@ -1,4 +1,4 @@
-local cjson = require("cjson")
+local cjson = require("cjson.safe")
 local ai_plugin_ctx = require("kong.llm.plugin.ctx")
 local ai_plugin_o11y = require("kong.llm.plugin.observability")
 
@@ -39,7 +39,10 @@ function _M:run(conf)
 
   else
     -- openai formats
-    local response_body_table = cjson.decode(response_body)
+    local response_body_table, err = cjson.decode(response_body)
+    if err then
+      kong.log.info("failed to decode response body: ", err)
+    end
     response_model = response_body_table and response_body_table.model
   end
 end