
Commit c0096a1

add tests
1 parent ffd6c61 commit c0096a1

File tree

2 files changed: +87 −0 lines

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
import { expect, test } from 'vitest';
import { createMockVercelModel } from '../../mock/vercel-mock-model.js';
import { createApp } from '../../application.js';
import { Message } from '../../domain.js';
import { VercelChatModelAdapter } from '../vercel.js';

const messages: Message[] = [{ role: 'user', content: 'Hello' }];

test('sends system message when system prompt is provided', async () => {
  const { languageModel, calls } = createMockVercelModel({ text: 'Hello, world!' });
  const app = createApp({
    chatModel: new VercelChatModelAdapter({ languageModel }),
    systemPrompt: 'This is system prompt',
  });

  await app.processMessages(messages);
  expect(calls.length).toBe(1);
  expect(calls[0].prompt).toContainEqual({
    role: 'system',
    content: 'This is system prompt',
  });
});

test('does not send system message when system prompt is not provided', async () => {
  const { languageModel, calls } = createMockVercelModel({ text: 'Hello, world!' });
  const app = createApp({
    chatModel: new VercelChatModelAdapter({ languageModel }),
  });

  await app.processMessages(messages);
  expect(calls.length).toBe(1);
  // objectContaining matches any prompt entry with role 'system', whatever its content
  expect(calls[0].prompt).not.toContainEqual(expect.objectContaining({ role: 'system' }));
});

test('does not send system message when system prompt returns null', async () => {
  const { languageModel, calls } = createMockVercelModel({ text: 'Hello, world!' });
  const app = createApp({
    chatModel: new VercelChatModelAdapter({ languageModel }),
    systemPrompt: () => null,
  });

  await app.processMessages(messages);
  expect(calls.length).toBe(1);
  expect(calls[0].prompt).not.toContainEqual(expect.objectContaining({ role: 'system' }));
});

test('does not send system message when system prompt returns empty string', async () => {
  const { languageModel, calls } = createMockVercelModel({ text: 'Hello, world!' });
  const app = createApp({
    chatModel: new VercelChatModelAdapter({ languageModel }),
    systemPrompt: () => '',
  });

  await app.processMessages(messages);
  expect(calls.length).toBe(1);
  expect(calls[0].prompt).not.toContainEqual(expect.objectContaining({ role: 'system' }));
});
Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
import { LanguageModelV1CallOptions } from 'ai';
import { MockLanguageModelV1 } from 'ai/test';

export type MockVercelModelOptions = {
  text: string;
};

export function createMockVercelModel({ text }: MockVercelModelOptions) {
  // Every doGenerate call is recorded here so tests can inspect the prompt.
  const calls: LanguageModelV1CallOptions[] = [];
  const languageModel = new MockLanguageModelV1({
    doGenerate: (options) => {
      calls.push(options);
      return Promise.resolve({
        rawCall: { rawPrompt: null, rawSettings: {} },
        finishReason: 'stop',
        usage: { promptTokens: 10, completionTokens: 20 },
        text,
      });
    },
    // TODO: add doStream when needed
  });

  return { languageModel, calls };
}
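
A possible follow-up to the TODO above (a sketch, not part of this commit): the factory could also stub doStream, recording calls the same way and replaying the text as a single chunk. The function name below is hypothetical, and the import of LanguageModelV1StreamPart from ai is an assumption; the chunk shapes ('text-delta', 'finish') follow the LanguageModelV1 streaming interface.

import { LanguageModelV1CallOptions, LanguageModelV1StreamPart } from 'ai';
import { MockLanguageModelV1 } from 'ai/test';

// Hypothetical streaming variant of the mock factory (not in this commit).
export function createMockVercelStreamingModel({ text }: { text: string }) {
  const calls: LanguageModelV1CallOptions[] = [];
  const languageModel = new MockLanguageModelV1({
    doStream: (options) => {
      calls.push(options);
      // Replay the whole response as a single text chunk, then finish.
      const stream = new ReadableStream<LanguageModelV1StreamPart>({
        start(controller) {
          controller.enqueue({ type: 'text-delta', textDelta: text });
          controller.enqueue({
            type: 'finish',
            finishReason: 'stop',
            usage: { promptTokens: 10, completionTokens: 20 },
          });
          controller.close();
        },
      });
      return Promise.resolve({
        stream,
        rawCall: { rawPrompt: null, rawSettings: {} },
      });
    },
  });

  return { languageModel, calls };
}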
