10 changes: 3 additions & 7 deletions apps/server/src/routes/api/llm.ts
@@ -550,13 +550,9 @@ async function handleStreamingProcess(
     const aiServiceManager = await import('../../services/llm/ai_service_manager.js');
     await aiServiceManager.default.getOrCreateAnyService();

-    // Use the chat pipeline directly for streaming
-    const { ChatPipeline } = await import('../../services/llm/pipeline/chat_pipeline.js');
-    const pipeline = new ChatPipeline({
-        enableStreaming: true,
-        enableMetrics: true,
-        maxToolCallIterations: 5
-    });
+    // Use the V2 pipeline directly for streaming
+    const pipelineV2Module = await import('../../services/llm/pipeline/pipeline_v2.js');
+    const pipeline = pipelineV2Module.default;

     // Get selected model
     const { getSelectedModelConfig } = await import('../../services/llm/config/configuration_helpers.js');
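Reviewer note: the call sites in this PR (`pipelineV2Module.default` above, `pipelineV2.execute(...)` in rest_chat_service.ts below) imply that `pipeline_v2.js` default-exports one shared, ready-to-use instance instead of a class configured per request. A minimal sketch of that assumed module shape, inferred only from the call sites, not the actual implementation:

```ts
// pipeline_v2.ts — assumed shape, reconstructed from how this PR consumes it.
// The old ChatPipeline took enableStreaming/enableMetrics/maxToolCallIterations
// in its constructor; V2 appears to drive equivalent behavior from the
// per-call input instead.

export interface PipelineV2Input {
    // Fields are sketched at the end of this diff, from the
    // rest_chat_service.ts call site.
    [key: string]: unknown;
}

class PipelineV2 {
    // Response shape is an assumption; the caller treats the result as the
    // assistant's reply.
    async execute(input: PipelineV2Input): Promise<{ text: string }> {
        // Streaming, metrics, and tool-call iteration handled internally.
        throw new Error("sketch only");
    }
}

// The default export is the shared singleton, matching
// `pipelineV2Module.default` in llm.ts.
export default new PipelineV2();
```

One consequence worth noting in review: a module-level singleton means per-request settings such as streaming can no longer vary via constructor arguments and must travel through `execute`'s input.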
18 changes: 5 additions & 13 deletions apps/server/src/services/llm/chat/rest_chat_service.ts
@@ -6,8 +6,7 @@ import log from "../../log.js";
 import type { Request, Response } from "express";
 import type { Message } from "../ai_interface.js";
 import aiServiceManager from "../ai_service_manager.js";
-import { ChatPipeline } from "../pipeline/chat_pipeline.js";
-import type { ChatPipelineInput } from "../pipeline/interfaces.js";
+import pipelineV2, { type PipelineV2Input } from "../pipeline/pipeline_v2.js";
 import options from "../../options.js";
 import { ToolHandler } from "./handlers/tool_handler.js";
 import chatStorageService from '../chat_storage_service.js';
@@ -113,13 +112,6 @@ class RestChatService
         // Initialize tools
         await ToolHandler.ensureToolsInitialized();

-        // Create and use the chat pipeline
-        const pipeline = new ChatPipeline({
-            enableStreaming: req.method === 'GET',
-            enableMetrics: true,
-            maxToolCallIterations: 5
-        });
-
         // Get user's preferred model
         const preferredModel = await this.getPreferredModel();

@@ -128,7 +120,8 @@
             systemPrompt: chat.messages.find(m => m.role === 'system')?.content,
             model: preferredModel,
             stream: !!(req.method === 'GET' || req.query.format === 'stream' || req.query.stream === 'true'),
-            chatNoteId: chatNoteId
+            chatNoteId: chatNoteId,
+            enableTools: true
         };

         log.info(`Pipeline options: ${JSON.stringify({ useAdvancedContext: pipelineOptions.useAdvancedContext, stream: pipelineOptions.stream })}`);
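The added `enableTools: true` makes tool calling an explicit per-request flag, where the old constructor implied it via `maxToolCallIterations`. A hedged sketch of what `pipelineOptions` carries at this call site; only fields visible in this diff are listed, and the optionality markers are assumptions:

```ts
// Options object built in rest_chat_service.ts, reconstructed from this diff.
// PipelineV2's real option type may be wider.
interface PipelineV2Options {
    useAdvancedContext?: boolean; // referenced in the log.info call above
    systemPrompt?: string;        // first system message in the chat, if any
    model?: string;               // the user's preferred model
    stream: boolean;              // GET request, ?format=stream, or ?stream=true
    chatNoteId: string;
    enableTools: boolean;         // new in this PR
}
```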
@@ -137,22 +130,21 @@
         const wsService = await import('../../ws.js');
         const accumulatedContentRef = { value: '' };

-        const pipelineInput: ChatPipelineInput = {
+        const pipelineInput: PipelineV2Input = {
             messages: chat.messages.map(msg => ({
                 role: msg.role as 'user' | 'assistant' | 'system',
                 content: msg.content
             })),
             query: content || '',
             noteId: undefined, // TODO: Add context note support if needed
             showThinking: showThinking,
             options: pipelineOptions,
             streamCallback: req.method === 'GET' ? (data, done, rawChunk) => {
                 this.handleStreamCallback(data, done, rawChunk, wsService.default, chatNoteId, res, accumulatedContentRef, chat);
             } : undefined
         };

-        // Execute the pipeline
-        const response = await pipeline.execute(pipelineInput);
+        const response = await pipelineV2.execute(pipelineInput);

         if (req.method === 'POST') {
             // Add assistant response to chat
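Taken together, the call site above pins down the minimum surface of `PipelineV2Input`. A sketch reconstructed purely from the fields populated in rest_chat_service.ts; exact types and optionality are assumptions:

```ts
// PipelineV2Input as implied by this diff; the real interface exported from
// pipeline_v2.ts may carry more fields.
import type { Message } from "../ai_interface.js";

export interface PipelineV2Input {
    messages: Message[];        // prior turns: user / assistant / system
    query: string;              // the new user message
    noteId?: string;            // context note; undefined here (see the TODO)
    showThinking?: boolean;     // whether to surface model "thinking" output
    options: PipelineV2Options; // see the options sketch above
    // Present only for GET (streaming) requests; chunks are forwarded to
    // handleStreamCallback, which relays over WebSocket and the HTTP response.
    streamCallback?: (data: string, done: boolean, rawChunk?: unknown) => void;
}
```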