From e9973e2acf2778c5051e62a858c1fc191e60f2c6 Mon Sep 17 00:00:00 2001
From: otdoges
Date: Sun, 20 Jul 2025 10:04:38 -0500
Subject: [PATCH] Potential fix for code scanning alert no. 30: Disallow the
 `any` type

Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
---
 src/lib/multiModelAI.ts | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/src/lib/multiModelAI.ts b/src/lib/multiModelAI.ts
index 9ea35de9..b79efae4 100644
--- a/src/lib/multiModelAI.ts
+++ b/src/lib/multiModelAI.ts
@@ -5,6 +5,11 @@ import systemPrompt from './systemPrompt';
 import { geminiManager, type TaskAssignment } from './geminiManager';
 import { kimiK2, type KimiK2Response } from './kimiK2';
 
+interface GenerationOptions {
+  temperature?: number;
+  maxTokens?: number;
+  includeReasoning?: boolean;
+}
 export interface MultiModelResponse {
   content: string;
   model: string;
@@ -338,8 +343,8 @@ class MultiModelAI {
   }
 
   private async generateLegacyMultiModelResponse(
-    messages: any[],
-    options: any
+    messages: { role: string; content: string }[],
+    options: GenerationOptions
   ): Promise<MultiModelResponse> {
     // Legacy implementation for fallback
     const { temperature = 0.7, maxTokens = 4000, includeReasoning = true } = options;
@@ -390,8 +395,8 @@ class MultiModelAI {
   }
 
   private async streamLegacyResponse(
-    messages: any[],
-    options: any
+    messages: { role: string; content: string }[],
+    options: GenerationOptions
   ): Promise<ReadableStream<string>> {
     const { temperature = 0.7, maxTokens = 4000 } = options;
     const model = groq(MULTI_MODEL_CONFIG.primary); // Use best preview model for streaming