Commit 05a63bd

chore: remove max_tokens default
1 parent 79acd17 commit 05a63bd

1 file changed: +4 -7 lines changed

packages/core/src/ai/vercel.ts

Lines changed: 4 additions & 7 deletions
@@ -16,9 +16,6 @@ import { RequestContext, Message } from '../domain.js';
 import { ApplicationTool } from '../tools.js';
 import type { ChatModel, AssistantResponse, ModelUsage } from './types.js';
 
-const DEFAULT_MAX_TOKENS = 1024;
-const DEFAULT_MAX_STEPS = 5;
-
 // Workaround for memory issue happening when sending image attachment. The attachments get inefficiently serialised causing a memory spike.
 const VERCEL_AI_SHARED_OPTIONS = {
   experimental_telemetry: {
@@ -127,9 +124,9 @@ export class VercelChatModelAdapter implements ChatModel {
     const result = await streamText({
       ...VERCEL_AI_SHARED_OPTIONS,
       model: this._options.languageModel,
-      maxTokens: this._options.maxTokens ?? DEFAULT_MAX_TOKENS,
-      maxSteps: this._options.maxSteps ?? DEFAULT_MAX_STEPS,
       messages: context.messages,
+      maxTokens: this._options.maxTokens,
+      maxSteps: this._options.maxSteps,
       tools: context.tools,
     });
 
@@ -160,9 +157,9 @@ export class VercelChatModelAdapter implements ChatModel {
     const result = await generateText({
       ...VERCEL_AI_SHARED_OPTIONS,
       model: this._options.languageModel,
-      maxTokens: this._options.maxTokens ?? DEFAULT_MAX_TOKENS,
-      maxSteps: this._options.maxSteps ?? DEFAULT_MAX_STEPS,
       messages: context.messages,
+      maxTokens: this._options.maxTokens,
+      maxSteps: this._options.maxSteps,
       tools: context.tools,
     });
     const responseTime = performance.now() - startTime;
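
With the hard-coded defaults removed, maxTokens and maxSteps are forwarded to streamText/generateText only when the caller sets them; otherwise undefined is passed through and the Vercel AI SDK falls back to its own behaviour. Below is a minimal sketch of how a caller might now supply these limits explicitly. The constructor shape and the @ai-sdk/openai provider are assumptions for illustration, not taken from this commit; only languageModel, maxTokens, and maxSteps are visible in the diff as adapter options.

import { openai } from '@ai-sdk/openai';
import { VercelChatModelAdapter } from './vercel.js';

// Assumed constructor: an options object whose fields match this._options
// as read in the diff (languageModel, maxTokens, maxSteps).
const chatModel = new VercelChatModelAdapter({
  languageModel: openai('gpt-4o-mini'),
  // After this change, omitting these fields means undefined reaches
  // streamText/generateText instead of the old 1024 / 5 defaults.
  maxTokens: 1024,
  maxSteps: 5,
});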
