Skip to content

Commit

Permalink
feat: expose model name (#41)
Browse files Browse the repository at this point in the history
  • Loading branch information
mdjastrzebski authored Jan 8, 2025
1 parent ea21cc7 commit 21d4f2e
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 9 deletions.
5 changes: 5 additions & 0 deletions .changeset/hot-spoons-worry.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@callstack/byorg-core': minor
---

core: expose `ChatModel.name` property
1 change: 1 addition & 0 deletions packages/core/src/ai/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,6 @@ export type ModelUsage = {
};

/**
 * Contract for a chat model that byorg-core can drive to answer a request.
 */
export interface ChatModel {
  /** Identifier of the model; adapters expose the underlying provider's model id (see VercelChatModelAdapter, which returns `languageModel.modelId`). */
  name: string;
  /** Produces the assistant's response for the given request context. */
  generateResponse(context: RequestContext): Promise<AssistantResponse>;
}
24 changes: 16 additions & 8 deletions packages/core/src/ai/vercel.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ const VERCEL_AI_SHARED_OPTIONS = {
},
};

export type VercelChatModelAdapterOptions = {
export type VercelChatModelAdapterConfig = {
languageModel: LanguageModel;
maxTokens?: number;
maxSteps?: number;
Expand All @@ -51,7 +51,15 @@ type AiExecutionResult = {
};

export class VercelChatModelAdapter implements ChatModel {
constructor(private readonly _options: VercelChatModelAdapterOptions) {}
  /** Adapter configuration: the Vercel AI SDK language model plus optional limits (maxTokens, maxSteps). */
  config: VercelChatModelAdapterConfig;

  /**
   * @param config - Configuration holding the Vercel AI SDK `LanguageModel` to delegate to.
   */
  constructor(config: VercelChatModelAdapterConfig) {
    this.config = config;
  }

  /**
   * Model name as required by the `ChatModel` interface.
   * Delegates to the Vercel AI SDK model's `modelId` (e.g. the provider's model string).
   */
  get name(): string {
    return this.config.languageModel.modelId;
  }

async generateResponse(context: RequestContext): Promise<AssistantResponse> {
let systemPrompt = context.systemPrompt();
Expand Down Expand Up @@ -123,10 +131,10 @@ export class VercelChatModelAdapter implements ChatModel {
const startTime = performance.now();
const result = await streamText({
...VERCEL_AI_SHARED_OPTIONS,
model: this._options.languageModel,
model: this.config.languageModel,
maxTokens: this.config.maxTokens,
maxSteps: this.config.maxSteps,
messages: context.messages,
maxTokens: this._options.maxTokens,
maxSteps: this._options.maxSteps,
tools: context.tools,
});

Expand Down Expand Up @@ -156,10 +164,10 @@ export class VercelChatModelAdapter implements ChatModel {
const startTime = performance.now();
const result = await generateText({
...VERCEL_AI_SHARED_OPTIONS,
model: this._options.languageModel,
model: this.config.languageModel,
maxTokens: this.config.maxTokens,
maxSteps: this.config.maxSteps,
messages: context.messages,
maxTokens: this._options.maxTokens,
maxSteps: this._options.maxSteps,
tools: context.tools,
});
const responseTime = performance.now() - startTime;
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ export { createApp } from './application.js';
export type { Middleware, NextFunction } from './middleware.js';

export type { AssistantResponse, ChatModel, ModelUsage } from './ai/types.js';
export type { VercelChatModelAdapterOptions } from './ai/vercel.js';
export type { VercelChatModelAdapterConfig } from './ai/vercel.js';
export { VercelChatModelAdapter } from './ai/vercel.js';

export type { Command, CommandsPluginConfig } from './plugins/commands.js';
Expand Down
1 change: 1 addition & 0 deletions packages/core/src/mock/mock-model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ export function createMockChatModel(config?: MockChatModelConfig): MockChatModel
let lastRandom = config?.seed ?? Date.now();
return {
calls,
name: 'mock',
generateResponse: async (context: RequestContext): Promise<AssistantResponse> => {
calls.push([context]);
lastRandom = random(lastRandom);
Expand Down

0 comments on commit 21d4f2e

Please sign in to comment.