Merge pull request #1559 from benjidotsh/ai/chat-completion-models
feat(ai): add support for chat completion models
Xantier authored Aug 20, 2024
2 parents 24c08e3 + 35d5410 commit f38d4b3
Showing 4 changed files with 14 additions and 5 deletions.
5 changes: 5 additions & 0 deletions .changeset/chatty-pumpkins-shave.md
@@ -0,0 +1,5 @@
+---
+'@roadiehq/rag-ai-backend': patch
+---
+
+Added support for chat completion models
8 changes: 5 additions & 3 deletions plugins/backend/rag-ai-backend/src/service/LlmService.ts
@@ -14,13 +14,14 @@
  * limitations under the License.
  */
 import { BaseLLM } from '@langchain/core/language_models/llms';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { EmbeddingDoc } from '@roadiehq/rag-ai-node';
 import { Logger } from 'winston';
 import { createPromptTemplates } from './prompts';
 
 export class LlmService {
   private readonly logger: Logger;
-  private readonly model: BaseLLM;
+  private readonly model: BaseLLM | BaseChatModel;
   private readonly prompts: {
     prefixPrompt: (embedding: string) => string;
     suffixPrompt: (input: string) => string;
@@ -32,7 +33,7 @@ export class LlmService {
     configuredPrompts,
   }: {
     logger: Logger;
-    model: BaseLLM;
+    model: BaseLLM | BaseChatModel;
     configuredPrompts?: {
       prefix?: string;
       suffix?: string;
@@ -51,6 +52,7 @@
     const prompt = `Human:\n${this.prompts.prefixPrompt(
       promptEmbeddings,
     )}\n ---\n${this.prompts.suffixPrompt(query)}\nAssistant:`;
-    return await this.model.invoke(prompt);
+    const response = await this.model.invoke(prompt);
+    return typeof response === 'string' ? response : response.content;
   }
 }
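
The return-value handling above is the heart of the change: a BaseLLM resolves invoke() to a plain string, while a BaseChatModel resolves it to a message object whose text sits in its content field, so the service now branches on the response type. A minimal standalone sketch of the same normalisation, assuming the FakeLLM and FakeListChatModel test helpers exported by @langchain/core/utils/testing (illustration only, not part of this diff):

import { FakeLLM, FakeListChatModel } from '@langchain/core/utils/testing';
import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Mirrors the normalisation added in LlmService: completion models answer
// with a string, chat models answer with a message carrying `content`.
const asText = async (model: BaseLLM | BaseChatModel, prompt: string) => {
  const response = await model.invoke(prompt);
  return typeof response === 'string' ? response : response.content;
};

// Stand-ins for real provider models (hypothetical wiring, no API keys needed).
const completionModel: BaseLLM = new FakeLLM({ response: 'completion text' });
const chatModel: BaseChatModel = new FakeListChatModel({ responses: ['chat text'] });

asText(completionModel, 'Hello').then(console.log); // "completion text"
asText(chatModel, 'Hello').then(console.log); // "chat text"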
3 changes: 2 additions & 1 deletion plugins/backend/rag-ai-backend/src/service/router.ts
@@ -19,6 +19,7 @@ import Router from 'express-promise-router';
 import { Logger } from 'winston';
 import { AugmentationIndexer, RetrievalPipeline } from '@roadiehq/rag-ai-node';
 import { BaseLLM } from '@langchain/core/language_models/llms';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { LlmService } from './LlmService';
 import { RagAiController } from './RagAiController';
 import { isEmpty } from 'lodash';
@@ -36,7 +37,7 @@ export interface RouterOptions {
   logger: Logger;
   augmentationIndexer: AugmentationIndexer;
   retrievalPipeline: RetrievalPipeline;
-  model: BaseLLM;
+  model: BaseLLM | BaseChatModel;
   config: Config;
 }
 
3 changes: 2 additions & 1 deletion plugins/backend/rag-ai-backend/src/service/types.ts
@@ -17,13 +17,14 @@ import { TokenManager } from '@backstage/backend-common';
 import { AugmentationIndexer, RetrievalPipeline } from '@roadiehq/rag-ai-node';
 import { Logger } from 'winston';
 import { BaseLLM } from '@langchain/core/language_models/llms';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Config } from '@backstage/config';
 
 export interface RagAiConfig {
   logger: Logger;
   tokenManager: TokenManager;
   augmentationIndexer: AugmentationIndexer;
   retrievalPipeline: RetrievalPipeline;
-  model: BaseLLM;
+  model: BaseLLM | BaseChatModel;
   config: Config;
 }
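
With router.ts and types.ts widened the same way, callers can hand either kind of LangChain model to RouterOptions or RagAiConfig. A rough sketch of what that enables, assuming the @langchain/openai provider package and an OPENAI_API_KEY in the environment (the classes and model names here are illustrative, not part of this change):

import { OpenAI, ChatOpenAI } from '@langchain/openai';
import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// The shape of the `model` field shared by RouterOptions and RagAiConfig
// after this commit.
type ModelOption = { model: BaseLLM | BaseChatModel };

// Both now type-check; before this change only the completion model did.
const completionBacked: ModelOption = {
  model: new OpenAI({ modelName: 'gpt-3.5-turbo-instruct' }),
};
const chatBacked: ModelOption = {
  model: new ChatOpenAI({ modelName: 'gpt-4o-mini' }),
};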
