diff --git a/packages/langbase/src/data/constants.ts b/packages/langbase/src/data/constants.ts
index 3bbcb35..4f820f2 100644
--- a/packages/langbase/src/data/constants.ts
+++ b/packages/langbase/src/data/constants.ts
@@ -2,4 +2,5 @@ export const GENERATION_ENDPOINTS = [
 	'/v1/pipes/run',
 	'/beta/chat',
 	'/beta/generate',
-]
+	'/v1/llm/run',
+];
diff --git a/packages/langbase/src/langbase/langbase.ts b/packages/langbase/src/langbase/langbase.ts
index 183624c..a9845c0 100644
--- a/packages/langbase/src/langbase/langbase.ts
+++ b/packages/langbase/src/langbase/langbase.ts
@@ -34,16 +34,20 @@ export interface LlmOptionsBase {
 	presence_penalty?: number;
 	frequency_penalty?: number;
 	stop?: string[];
+	tools?: Tools[];
 	tool_choice?: 'auto' | 'required' | ToolChoice;
 	parallel_tool_calls?: boolean;
+	reasoning_effort?: string | null;
+	max_completion_tokens?: number;
+	response_format?: ResponseFormat;
 	customModelParams?: Record<string, any>;
 }
 
-export interface LlmOptionsT extends LlmOptionsBase {
+export interface LlmOptions extends LlmOptionsBase {
 	stream?: false;
 }
 
-export interface LlmOptionsStreamT extends LlmOptionsBase {
+export interface LlmOptionsStream extends LlmOptionsBase {
 	stream: true;
 }
 
@@ -135,6 +139,19 @@ export interface MessageContentType {
 	};
 }
 
+export type ResponseFormat =
+	| {type: 'text'}
+	| {type: 'json_object'}
+	| {
+			type: 'json_schema';
+			json_schema: {
+				description?: string;
+				name: string;
+				schema?: Record<string, unknown>;
+				strict?: boolean | null;
+			};
+	  };
+
 export interface ThreadMessage extends Message {
 	attachments?: any[];
 	metadata?: Record<string, string>;
@@ -551,8 +568,10 @@ export class Langbase {
 	public parse: (options: ParseOptions) => Promise<ParseResponse>;
 
 	public llm: {
-		(options: LlmOptionsStreamT): Promise<RunResponseStream>;
-		(options: LlmOptionsT): Promise<RunResponse>;
+		run: {
+			(options: LlmOptionsStream): Promise<RunResponseStream>;
+			(options: LlmOptions): Promise<RunResponse>;
+		};
 	};
 
 	constructor(options?: LangbaseOptions) {
@@ -634,7 +653,9 @@
 			},
 		};
 
-		this.llm = this.runLlm.bind(this);
+		this.llm = {
+			run: this.runLlm.bind(this),
+		};
 	}
 
 	private async runPipe(
@@ -1057,14 +1078,12 @@
 	}
 
 	// Add the private implementation
-	private async runLlm(
-		options: LlmOptionsStreamT,
-	): Promise<RunResponseStream>;
+	private async runLlm(options: LlmOptionsStream): Promise<RunResponseStream>;
 
-	private async runLlm(options: LlmOptionsT): Promise<RunResponse>;
+	private async runLlm(options: LlmOptions): Promise<RunResponse>;
 
 	private async runLlm(
-		options: LlmOptionsT | LlmOptionsStreamT,
+		options: LlmOptions | LlmOptionsStream,
 	): Promise<RunResponse | RunResponseStream> {
 		if (!options.llmKey) {
 			throw new Error('LLM API key is required to run this LLM.');
@@ -1076,7 +1095,7 @@
 		}
 
 		return this.request.post({
-			endpoint: '/v1/llm',
+			endpoint: '/v1/llm/run',
 			body: options,
 			headers: {
 				...(options.llmKey && {'LB-LLM-Key': options.llmKey}),