From 735d9b86acdc067e1ee6ebe1ea50de2955431050 Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Tue, 28 Nov 2023 18:33:57 -0500 Subject: [PATCH 1/8] fix: prevent 400 when using runTools/runFunctions with Azure OpenAI API (#544) --- src/lib/AbstractChatCompletionRunner.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/lib/AbstractChatCompletionRunner.ts b/src/lib/AbstractChatCompletionRunner.ts index 60b1e5602..a485c487d 100644 --- a/src/lib/AbstractChatCompletionRunner.ts +++ b/src/lib/AbstractChatCompletionRunner.ts @@ -90,7 +90,11 @@ export abstract class AbstractChatCompletionRunner< } protected _addMessage(message: ChatCompletionMessageParam, emit = true) { + // @ts-expect-error this works around a bug in the Azure OpenAI API in which `content` is missing instead of null. + if (!('content' in message)) message.content = null; + this.messages.push(message); + if (emit) { this._emit('message', message); if ((isFunctionMessage(message) || isToolMessage(message)) && message.content) { From 566d290006920f536788bb77f4d24a6906e2971f Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Thu, 30 Nov 2023 08:20:13 -0500 Subject: [PATCH 2/8] docs(readme): update example snippets (#546) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 4c5c6666a..2a23c32de 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ The full API of this library can be found in [api.md file](api.md) along with ma import OpenAI from 'openai'; const openai = new OpenAI({ - apiKey: 'My API Key', // defaults to process.env["OPENAI_API_KEY"] + apiKey: process.env['OPENAI_API_KEY'], // This is the default and can be omitted }); async function main() { @@ -81,7 +81,7 @@ This library includes TypeScript definitions for all request params and response import OpenAI from 'openai'; const openai = new OpenAI({ - apiKey: 'My API Key', // defaults to process.env["OPENAI_API_KEY"] + apiKey: process.env['OPENAI_API_KEY'], // This is the default and can be omitted }); async function main() { From 06fb68de1ff80983e349b6715d1037e2072c8dd4 Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Thu, 30 Nov 2023 11:54:12 -0500 Subject: [PATCH 3/8] feat(client): support reading the base url from an env variable (#547) --- src/index.ts | 13 ++++++++----- tests/index.test.ts | 15 +++++++++++++++ 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/src/index.ts b/src/index.ts index d91a608cb..71c1678b9 100644 --- a/src/index.ts +++ b/src/index.ts @@ -20,8 +20,10 @@ export interface ClientOptions { /** * Override the default base URL for the API, e.g., "https://api.example.com/v2/" + * + * Defaults to process.env['OPENAI_BASE_URL']. */ - baseURL?: string; + baseURL?: string | null | undefined; /** * The maximum amount of time (in milliseconds) that the client should wait for a response @@ -89,9 +91,9 @@ export class OpenAI extends Core.APIClient { /** * API Client for interfacing with the OpenAI API. * - * @param {string} [opts.apiKey==process.env['OPENAI_API_KEY'] ?? undefined] - * @param {string | null} [opts.organization==process.env['OPENAI_ORG_ID'] ?? null] - * @param {string} [opts.baseURL] - Override the default base URL for the API. + * @param {string} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined] + * @param {string | null} [opts.organization=process.env['OPENAI_ORG_ID'] ?? 
null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? https://api.openai.com/v1] - Override the default base URL for the API. * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. @@ -101,6 +103,7 @@ export class OpenAI extends Core.APIClient { * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. */ constructor({ + baseURL = Core.readEnv('OPENAI_BASE_URL'), apiKey = Core.readEnv('OPENAI_API_KEY'), organization = Core.readEnv('OPENAI_ORG_ID') ?? null, ...opts @@ -115,7 +118,7 @@ export class OpenAI extends Core.APIClient { apiKey, organization, ...opts, - baseURL: opts.baseURL ?? `https://api.openai.com/v1`, + baseURL: baseURL ?? `https://api.openai.com/v1`, }; if (!options.dangerouslyAllowBrowser && Core.isRunningInBrowser()) { diff --git a/tests/index.test.ts b/tests/index.test.ts index f54ea5cfc..78847568d 100644 --- a/tests/index.test.ts +++ b/tests/index.test.ts @@ -132,6 +132,21 @@ describe('instantiate client', () => { const client = new OpenAI({ baseURL: 'http://localhost:5000/custom/path', apiKey: 'My API Key' }); expect(client.buildURL('/foo', null)).toEqual('http://localhost:5000/custom/path/foo'); }); + + afterEach(() => { + process.env['SINK_BASE_URL'] = undefined; + }); + + test('explicit option', () => { + const client = new OpenAI({ baseURL: 'https://example.com', apiKey: 'My API Key' }); + expect(client.baseURL).toEqual('https://example.com'); + }); + + test('env variable', () => { + process.env['OPENAI_BASE_URL'] = 'https://example.com/from_env'; + const client = new OpenAI({ apiKey: 'My API Key' }); + expect(client.baseURL).toEqual('https://example.com/from_env'); + }); }); test('maxRetries option is correctly set', () => { From bb09e55806b5f4ce08bf18a84ebb8425dd379617 Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Mon, 4 Dec 2023 05:45:16 -0500 Subject: [PATCH 4/8] ci: ensure PR titles use conventional commits (#551) --- .github/workflows/lint-pr.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 .github/workflows/lint-pr.yml diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml new file mode 100644 index 000000000..a83e64a67 --- /dev/null +++ b/.github/workflows/lint-pr.yml @@ -0,0 +1,21 @@ +name: "Lint PR" + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +permissions: + pull-requests: read + +jobs: + pr_title: + name: Validate PR title + runs-on: ubuntu-latest + if: github.ref == 'refs/heads/master' && github.repository == 'openai/openai-node' + steps: + - uses: amannn/action-semantic-pull-request@v5 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From a1e230d0c23add4d5b8faffa62490b8e758803cc Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Wed, 6 Dec 2023 01:03:59 +0000 Subject: [PATCH 5/8] ci: remove PR title linter (#554) --- .github/workflows/lint-pr.yml | 21 --------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/lint-pr.yml diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml deleted file mode 100644 index 
a83e64a67..000000000 --- a/.github/workflows/lint-pr.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: "Lint PR" - -on: - pull_request_target: - types: - - opened - - edited - - synchronize - -permissions: - pull-requests: read - -jobs: - pr_title: - name: Validate PR title - runs-on: ubuntu-latest - if: github.ref == 'refs/heads/master' && github.repository == 'openai/openai-node' - steps: - - uses: amannn/action-semantic-pull-request@v5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 935b8983c74f7b03b67d22f4d194989838f963f3 Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Mon, 11 Dec 2023 21:15:54 +0000 Subject: [PATCH 6/8] build: specify `packageManager: yarn` (#561) --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index 2e8317c67..0d66b7365 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,7 @@ "type": "commonjs", "repository": "github:openai/openai-node", "license": "Apache-2.0", + "packageManager": "yarn@1.22.21", "private": false, "scripts": { "test": "bin/check-test-server && yarn jest", From f5cdd0f704d3d075cdfc5bc2df1f7a8bae5cd9f1 Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Mon, 11 Dec 2023 23:00:49 +0000 Subject: [PATCH 7/8] fix: correct some runTools behavior and deprecate runFunctions (#562) --- examples/tool-call-helpers-zod.ts | 144 ++ examples/tool-call-helpers.ts | 7 + examples/tool-calls-stream.ts | 251 ++++ helpers.md | 62 +- src/lib/AbstractChatCompletionRunner.ts | 32 +- src/lib/ChatCompletionRunFunctions.test.ts | 1386 ++++++++++++-------- src/lib/ChatCompletionRunner.ts | 13 +- src/lib/ChatCompletionStreamingRunner.ts | 23 +- src/lib/RunnableFunction.ts | 40 +- src/resources/beta/chat/completions.ts | 9 +- 10 files changed, 1336 insertions(+), 631 deletions(-) create mode 100755 examples/tool-call-helpers-zod.ts create mode 100755 examples/tool-calls-stream.ts diff --git a/examples/tool-call-helpers-zod.ts b/examples/tool-call-helpers-zod.ts new file mode 100755 index 000000000..e02c743be --- /dev/null +++ b/examples/tool-call-helpers-zod.ts @@ -0,0 +1,144 @@ +#!/usr/bin/env -S npm run tsn -T + +import OpenAI from 'openai'; +import { RunnableToolFunctionWithParse } from 'openai/lib/RunnableFunction'; +import { JSONSchema } from 'openai/lib/jsonschema'; +import { ZodSchema, z } from 'zod'; +import { zodToJsonSchema } from 'zod-to-json-schema'; + +// gets API Key from environment variable OPENAI_API_KEY +const openai = new OpenAI(); + +// Define your functions, alongside zod schemas. 
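+// The pattern used below: each tool gets (1) a zod schema describing its
+// parameters, (2) a TypeScript type inferred from that schema, and (3) an
+// async function implemented against that type. The `zodFunction` helper at
+// the bottom of this file turns each schema/function pair into a runnable
+// tool, using `zodToJsonSchema` for `parameters` and `schema.parse` for `parse`.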
+ +const ListParams = z.object({ + genre: z.enum(['mystery', 'nonfiction', 'memoir', 'romance', 'historical']), +}); +type ListParams = z.infer; +async function listBooks({ genre }: ListParams) { + return db.filter((item) => item.genre === genre).map((item) => ({ name: item.name, id: item.id })); +} + +const SearchParams = z.object({ + name: z.string(), +}); +type SearchParams = z.infer; +async function searchBooks({ name }: SearchParams) { + return db.filter((item) => item.name.includes(name)).map((item) => ({ name: item.name, id: item.id })); +} + +const GetParams = z.object({ + id: z.string(), +}); +type GetParams = z.infer; +async function getBook({ id }: GetParams) { + return db.find((item) => item.id === id)!; +} + +async function main() { + const runner = await openai.beta.chat.completions + .runTools({ + model: 'gpt-4-1106-preview', + stream: true, + tools: [ + zodFunction({ + function: listBooks, + schema: ListParams, + description: 'List queries books by genre, and returns a list of names of books', + }), + zodFunction({ + function: searchBooks, + schema: SearchParams, + description: 'Search queries books by their name and returns a list of book names and their ids', + }), + zodFunction({ + function: getBook, + schema: GetParams, + description: + "Get returns a book's detailed information based on the id of the book. Note that this does not accept names, and only IDs, which you can get by using search.", + }), + ], + messages: [ + { + role: 'system', + content: + 'Please use our book database, which you can access using functions to answer the following questions.', + }, + { + role: 'user', + content: + 'I really enjoyed reading To Kill a Mockingbird, could you recommend me a book that is similar and tell me why?', + }, + ], + }) + .on('message', (msg) => console.log('msg', msg)) + .on('functionCall', (functionCall) => console.log('functionCall', functionCall)) + .on('functionCallResult', (functionCallResult) => console.log('functionCallResult', functionCallResult)) + .on('content', (diff) => process.stdout.write(diff)); + + const result = await runner.finalChatCompletion(); + console.log(); + console.log('messages'); + console.log(runner.messages); + + console.log(); + console.log('final chat completion'); + console.dir(result, { depth: null }); +} + +const db = [ + { + id: 'a1', + name: 'To Kill a Mockingbird', + genre: 'historical', + description: `Compassionate, dramatic, and deeply moving, "To Kill A Mockingbird" takes readers to the roots of human behavior - to innocence and experience, kindness and cruelty, love and hatred, humor and pathos. Now with over 18 million copies in print and translated into forty languages, this regional story by a young Alabama woman claims universal appeal. Harper Lee always considered her book to be a simple love story. Today it is regarded as a masterpiece of American literature.`, + }, + { + id: 'a2', + name: 'All the Light We Cannot See', + genre: 'historical', + description: `In a mining town in Germany, Werner Pfennig, an orphan, grows up with his younger sister, enchanted by a crude radio they find that brings them news and stories from places they have never seen or imagined. Werner becomes an expert at building and fixing these crucial new instruments and is enlisted to use his talent to track down the resistance. 
Deftly interweaving the lives of Marie-Laure and Werner, Doerr illuminates the ways, against all odds, people try to be good to one another.`, + }, + { + id: 'a3', + name: 'Where the Crawdads Sing', + genre: 'historical', + description: `For years, rumors of the “Marsh Girl” haunted Barkley Cove, a quiet fishing village. Kya Clark is barefoot and wild; unfit for polite society. So in late 1969, when the popular Chase Andrews is found dead, locals immediately suspect her. +But Kya is not what they say. A born naturalist with just one day of school, she takes life's lessons from the land, learning the real ways of the world from the dishonest signals of fireflies. But while she has the skills to live in solitude forever, the time comes when she yearns to be touched and loved. Drawn to two young men from town, who are each intrigued by her wild beauty, Kya opens herself to a new and startling world—until the unthinkable happens.`, + }, +]; + +/** + * A generic utility function that returns a RunnableFunction + * you can pass to `.runTools()`, + * with a fully validated, typesafe parameters schema. + * + * You are encouraged to copy/paste this into your codebase! + */ +function zodFunction({ + function: fn, + schema, + description = '', + name, +}: { + function: (args: T) => Promise; + schema: ZodSchema; + description?: string; + name?: string; +}): RunnableToolFunctionWithParse { + return { + type: 'function', + function: { + function: fn, + name: name ?? fn.name, + description: description, + parameters: zodToJsonSchema(schema) as JSONSchema, + parse(input: string): T { + const obj = JSON.parse(input); + return schema.parse(obj); + }, + }, + }; +} + +main(); diff --git a/examples/tool-call-helpers.ts b/examples/tool-call-helpers.ts index d87e3c3e6..21b86f8fb 100755 --- a/examples/tool-call-helpers.ts +++ b/examples/tool-call-helpers.ts @@ -6,6 +6,13 @@ import { RunnableToolFunction } from 'openai/lib/RunnableFunction'; // gets API Key from environment variable OPENAI_API_KEY const openai = new OpenAI(); +/** + * Note, this will automatically ensure the model returns valid JSON, + * but won't ensure it conforms to your schema. + * + * For that functionality, please see the `tool-call-helpers-zod.ts` example, + * which shows a fully typesafe, schema-validating version. + */ const tools: RunnableToolFunction[] = [ { type: 'function', diff --git a/examples/tool-calls-stream.ts b/examples/tool-calls-stream.ts new file mode 100755 index 000000000..924e6b7cf --- /dev/null +++ b/examples/tool-calls-stream.ts @@ -0,0 +1,251 @@ +#!/usr/bin/env -S npm run tsn -T + +// +// +// +// +// +// +// Note: this file is provided for completeness, +// but much more convenient ways of streaming tool calls are available +// with the `.stream()` and `.runTools()` helpers. +// +// See the `tool-call-helpers.ts` and `stream.ts` examples for usage, +// or the README for documentation. 
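+//
+// For reference, the `.runTools()` flow sketched roughly (see
+// `tool-call-helpers.ts` for a complete, runnable version):
+//
+//   const runner = openai.beta.chat.completions.runTools({
+//     model: 'gpt-3.5-turbo',
+//     stream: true,
+//     tools: [ /* RunnableToolFunction definitions */ ],
+//     messages: [{ role: 'user', content: '...' }],
+//   });
+//   const reply = await runner.finalContent();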
+// +// +// +// +// +// + +import util from 'util'; +import OpenAI from 'openai'; +import { + ChatCompletionMessage, + ChatCompletionChunk, + ChatCompletionMessageParam, +} from 'openai/resources/chat'; + +// gets API Key from environment variable OPENAI_API_KEY +const openai = new OpenAI(); + +const tools: OpenAI.Chat.Completions.ChatCompletionTool[] = [ + { + type: 'function', + function: { + name: 'list', + description: 'list queries books by genre, and returns a list of names of books', + parameters: { + type: 'object', + properties: { + genre: { type: 'string', enum: ['mystery', 'nonfiction', 'memoir', 'romance', 'historical'] }, + }, + }, + }, + }, + { + type: 'function', + function: { + name: 'search', + description: 'search queries books by their name and returns a list of book names and their ids', + parameters: { + type: 'object', + properties: { + name: { type: 'string' }, + }, + }, + }, + }, + { + type: 'function', + function: { + name: 'get', + description: + "get returns a book's detailed information based on the id of the book. Note that this does not accept names, and only IDs, which you can get by using search.", + parameters: { + type: 'object', + properties: { + id: { type: 'string' }, + }, + }, + }, + }, +]; + +async function callTool(tool_call: OpenAI.Chat.Completions.ChatCompletionMessageToolCall): Promise { + if (tool_call.type !== 'function') throw new Error('Unexpected tool_call type:' + tool_call.type); + const args = JSON.parse(tool_call.function.arguments); + switch (tool_call.function.name) { + case 'list': + return await list(args['genre']); + + case 'search': + return await search(args['name']); + + case 'get': + return await get(args['id']); + + default: + throw new Error('No function found'); + } +} + +async function main() { + const messages: ChatCompletionMessageParam[] = [ + { + role: 'system', + content: + 'Please use our book database, which you can access using functions to answer the following questions.', + }, + { + role: 'user', + content: + 'I really enjoyed reading To Kill a Mockingbird, could you recommend me a book that is similar and tell me why?', + }, + ]; + console.log(messages[0]); + console.log(); + console.log(messages[1]); + console.log(); + + while (true) { + const stream = await openai.chat.completions.create({ + model: 'gpt-3.5-turbo', + messages, + tools: tools, + stream: true, + }); + + // Since the stream returns chunks, we need to build up the ChatCompletionMessage object. + // We implement this logic in messageReducer, which coalesces deltas into the message. + // `lineRewriter()` allows us to rewrite the last output with new text, which is one + // way of forwarding the streamed output to a visual interface. + let writeLine = lineRewriter(); + let message = {} as ChatCompletionMessage; + for await (const chunk of stream) { + message = messageReducer(message, chunk); + writeLine(message); + } + console.log(); + messages.push(message); + + // If there are no tool calls, we're done and can exit this loop + if (!message.tool_calls) { + return; + } + + // If there are tool calls, we generate a new message with the role 'tool' for each tool call. 
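+    // Each result is pushed back with role 'tool' and the matching `tool_call_id`,
+    // so the model can associate the output with the call that produced it on the
+    // next loop iteration.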
+ for (const toolCall of message.tool_calls) { + const result = await callTool(toolCall); + const newMessage = { + tool_call_id: toolCall.id, + role: 'tool' as const, + name: toolCall.function.name, + content: JSON.stringify(result), + }; + console.log(newMessage); + messages.push(newMessage); + } + console.log(); + } +} + +function messageReducer(previous: ChatCompletionMessage, item: ChatCompletionChunk): ChatCompletionMessage { + const reduce = (acc: any, delta: ChatCompletionChunk.Choice.Delta) => { + acc = { ...acc }; + for (const [key, value] of Object.entries(delta)) { + if (acc[key] === undefined || acc[key] === null) { + acc[key] = value; + // OpenAI.Chat.Completions.ChatCompletionMessageToolCall does not have a key, .index + if (Array.isArray(acc[key])) { + for (const arr of acc[key]) { + delete arr.index; + } + } + } else if (typeof acc[key] === 'string' && typeof value === 'string') { + acc[key] += value; + } else if (typeof acc[key] === 'number' && typeof value === 'number') { + acc[key] = value; + } else if (Array.isArray(acc[key]) && Array.isArray(value)) { + const accArray = acc[key]; + for (let i = 0; i < value.length; i++) { + const { index, ...chunkTool } = value[i]; + if (index - accArray.length > 1) { + throw new Error( + `Error: An array has an empty value when tool_calls are constructed. tool_calls: ${accArray}; tool: ${value}`, + ); + } + accArray[index] = reduce(accArray[index], chunkTool); + } + } else if (typeof acc[key] === 'object' && typeof value === 'object') { + acc[key] = reduce(acc[key], value); + } + } + return acc; + }; + return reduce(previous, item.choices[0]!.delta) as ChatCompletionMessage; +} + +function lineRewriter() { + let lastMessageLines = 0; + return function write(value: any) { + process.stdout.cursorTo(0); + process.stdout.moveCursor(0, -lastMessageLines); + + // calculate where to move cursor back for the next move. + const text = util.formatWithOptions({ colors: false, breakLength: Infinity, depth: 4 }, value); + const __LINE_BREAK_PLACE_HOLDER__ = '__LINE_BREAK_PLACE_HOLDER__'; + const lines = text + // @ts-ignore-error this requires es2021 + .replaceAll('\\n', __LINE_BREAK_PLACE_HOLDER__) + .split('\n') + // @ts-ignore-error this requires es2021 + .map((line: string) => line.replaceAll(__LINE_BREAK_PLACE_HOLDER__, '\\n')); + lastMessageLines = -1; + for (const line of lines) { + const lineLength = line.length; + lastMessageLines += Math.ceil(lineLength / process.stdout.columns); + } + lastMessageLines = Math.max(lastMessageLines, 0); + + process.stdout.clearScreenDown(); + process.stdout.write(util.formatWithOptions({ colors: true, breakLength: Infinity, depth: 4 }, value)); + }; +} +const db: { id: string; name: string; genre: string; description: string }[] = [ + { + id: 'a1', + name: 'To Kill a Mockingbird', + genre: 'historical', + description: `Compassionate, dramatic, and deeply moving, "To Kill A Mockingbird" takes readers to the roots of human behavior - to innocence and experience, kindness and cruelty, love and hatred, humor and pathos. Now with over 18 million copies in print and translated into forty languages, this regional story by a young Alabama woman claims universal appeal. Harper Lee always considered her book to be a simple love story. 
Today it is regarded as a masterpiece of American literature.`, + }, + { + id: 'a2', + name: 'All the Light We Cannot See', + genre: 'historical', + description: `In a mining town in Germany, Werner Pfennig, an orphan, grows up with his younger sister, enchanted by a crude radio they find that brings them news and stories from places they have never seen or imagined. Werner becomes an expert at building and fixing these crucial new instruments and is enlisted to use his talent to track down the resistance. Deftly interweaving the lives of Marie-Laure and Werner, Doerr illuminates the ways, against all odds, people try to be good to one another.`, + }, + { + id: 'a3', + name: 'Where the Crawdads Sing', + genre: 'historical', + description: `For years, rumors of the “Marsh Girl” haunted Barkley Cove, a quiet fishing village. Kya Clark is barefoot and wild; unfit for polite society. So in late 1969, when the popular Chase Andrews is found dead, locals immediately suspect her. + +But Kya is not what they say. A born naturalist with just one day of school, she takes life's lessons from the land, learning the real ways of the world from the dishonest signals of fireflies. But while she has the skills to live in solitude forever, the time comes when she yearns to be touched and loved. Drawn to two young men from town, who are each intrigued by her wild beauty, Kya opens herself to a new and startling world—until the unthinkable happens.`, + }, +]; + +async function list(genre: string) { + return db.filter((item) => item.genre === genre).map((item) => ({ name: item.name, id: item.id })); +} + +async function search(name: string) { + return db.filter((item) => item.name.includes(name)).map((item) => ({ name: item.name, id: item.id })); +} + +async function get(id: string) { + return db.find((item) => item.id === id)!; +} + +main(); diff --git a/helpers.md b/helpers.md index 4a987b347..859d45ab0 100644 --- a/helpers.md +++ b/helpers.md @@ -21,14 +21,11 @@ See an example of streaming helpers in action in [`examples/stream.ts`](examples ## Automated Function Calls ```ts -openai.chat.completions.runFunctions({ stream: false, … }, options?): ChatCompletionRunner -openai.chat.completions.runFunctions({ stream: true, … }, options?): ChatCompletionStreamingRunner - openai.chat.completions.runTools({ stream: false, … }, options?): ChatCompletionRunner openai.chat.completions.runTools({ stream: true, … }, options?): ChatCompletionStreamingRunner ``` -`openai.chat.completions.runFunctions()` and `openai.chat.completions.runTools()` return a Runner +`openai.chat.completions.runTools()` returns a Runner for automating function calls with chat completions. The runner automatically calls the JavaScript functions you provide and sends their results back to the API, looping as long as the model requests function calls. @@ -37,24 +34,6 @@ If you pass a `parse` function, it will automatically parse the `arguments` for errors to the model to attempt auto-recovery. Otherwise, the args will be passed to the function you provide as a string. -```ts -client.chat.completions.runFunctions({ - model: 'gpt-3.5-turbo', - messages: [{ role: 'user', content: 'How is the weather this week?' }], - functions: [{ - function: getWeather as (args: { location: string, time: Date}) => any, - parse: parseFunction as (args: strings) => { location: string, time: Date }. 
- parameters: { - type: 'object', - properties: { - location: { type: 'string' }, - time: { type: 'string', format: 'date-time' }, - }, - }, - }], -}); -``` - ```ts client.chat.completions.runTools({ model: 'gpt-3.5-turbo', @@ -76,7 +55,6 @@ client.chat.completions.runTools({ }); ``` - If you pass `function_call: {name: …}` instead of `auto`, it returns immediately after calling that function (and only loops to auto-recover parsing errors). @@ -87,6 +65,8 @@ chat completion request, not for the entire call run. See an example of automated function calls in action in [`examples/function-call-helpers.ts`](examples/function-call-helpers.ts). +Note, `runFunctions` was also previously available, but has been deprecated in favor of `runTools`. + ## Runner API ### Events @@ -108,7 +88,7 @@ The event fired when a chat completion is returned or done being streamed by the #### `.on('message', (message: ChatCompletionMessageParam) => …)` The event fired when a new message is either sent or received from the API. Does not fire for the messages -sent as the parameter to either `.runFunctions()` or `.stream()` +sent as the parameter to either `.runTools()` or `.stream()` #### `.on('content', (content: string) => …)` (without `stream`) @@ -232,19 +212,18 @@ const client = new OpenAI(); async function main() { const runner = client.chat.completions - .runFunctions({ + .runTools({ model: 'gpt-3.5-turbo', messages: [{ role: 'user', content: "How's the weather this week in Los Angeles?" }], - functions: [ + tools: [ { - function: function queryDatabase(props) { … }, - … - }, - { - function: function updateDatabase(props, runner) { - runner.abort() - }, - … + type: 'function', + function: { + function: function updateDatabase(props, runner) { + runner.abort() + }, + … + } }, ], }) @@ -272,15 +251,18 @@ const client = new OpenAI(); async function main() { const runner = client.chat.completions - .runFunctions({ + .runTools({ model: 'gpt-3.5-turbo', messages: [{ role: 'user', content: "How's the weather this week in Los Angeles?" 
}], - functions: [ + tools: [ { - function: getWeather, - parse: GetWeatherParameters.parse, - parameters: zodToJsonSchema(GetWeatherParameters), - }, + type: 'function', + function: { + function: getWeather, + parse: GetWeatherParameters.parse, + parameters: zodToJsonSchema(GetWeatherParameters), + } + } ], }) .on('message', (message) => console.log(message)); diff --git a/src/lib/AbstractChatCompletionRunner.ts b/src/lib/AbstractChatCompletionRunner.ts index a485c487d..1f089d477 100644 --- a/src/lib/AbstractChatCompletionRunner.ts +++ b/src/lib/AbstractChatCompletionRunner.ts @@ -255,6 +255,9 @@ export abstract class AbstractChatCompletionRunner< if (isAssistantMessage(message) && message?.function_call) { return message.function_call; } + if (isAssistantMessage(message) && message?.tool_calls?.length) { + return message.tool_calls.at(-1)?.function; + } } return; @@ -273,7 +276,18 @@ export abstract class AbstractChatCompletionRunner< for (let i = this.messages.length - 1; i >= 0; i--) { const message = this.messages[i]; if (isFunctionMessage(message) && message.content != null) { - return message.content as string; + return message.content; + } + if ( + isToolMessage(message) && + message.content != null && + this.messages.some( + (x) => + x.role === 'assistant' && + x.tool_calls?.some((y) => y.type === 'function' && y.id === message.tool_call_id), + ) + ) { + return message.content; } } @@ -333,7 +347,9 @@ export abstract class AbstractChatCompletionRunner< protected _emit(event: Event, ...args: EventParameters) { // make sure we don't emit any events after end - if (this.#ended) return; + if (this.#ended) { + return; + } if (event === 'end') { this.#ended = true; @@ -379,7 +395,7 @@ export abstract class AbstractChatCompletionRunner< protected _emitFinal() { const completion = this._chatCompletions[this._chatCompletions.length - 1]; if (completion) this._emit('finalChatCompletion', completion); - const finalMessage = this.messages[this.messages.length - 1]; + const finalMessage = this.#getFinalMessage(); if (finalMessage) this._emit('finalMessage', finalMessage); const finalContent = this.#getFinalContent(); if (finalContent) this._emit('finalContent', finalContent); @@ -573,7 +589,9 @@ export abstract class AbstractChatCompletionRunner< if (!message) { throw new OpenAIError(`missing message in ChatCompletion response`); } - if (!message.tool_calls) return; + if (!message.tool_calls) { + return; + } for (const tool_call of message.tool_calls) { if (tool_call.type !== 'function') continue; @@ -611,9 +629,13 @@ export abstract class AbstractChatCompletionRunner< const content = this.#stringifyFunctionCallResult(rawContent); this._addMessage({ role, tool_call_id, content }); - if (singleFunctionToCall) return; + if (singleFunctionToCall) { + return; + } } } + + return; } #stringifyFunctionCallResult(rawContent: unknown): string { diff --git a/src/lib/ChatCompletionRunFunctions.test.ts b/src/lib/ChatCompletionRunFunctions.test.ts index a930515c4..2a5e91dcc 100644 --- a/src/lib/ChatCompletionRunFunctions.test.ts +++ b/src/lib/ChatCompletionRunFunctions.test.ts @@ -2,7 +2,7 @@ import OpenAI from 'openai'; import { OpenAIError } from 'openai/error'; import { PassThrough } from 'stream'; import { - ParsingFunction, + ParsingToolFunction, type ChatCompletionRunner, type ChatCompletionFunctionRunnerParams, ChatCompletionStreamingRunner, @@ -63,10 +63,13 @@ function mockFetch(): { fetch: Fetch; handleRequest: (handle: Fetch) => Promise< } function handleRequest(handle: typeof fetch): Promise { - 
return new Promise((resolve) => { + return new Promise((resolve, reject) => { fetchQueue.shift()?.(async (req, init) => { try { return await handle(req, init); + } catch (err) { + reject(err); + return err as any; } finally { resolve(); } @@ -157,10 +160,12 @@ function* functionCallDeltas( args: string, { index = 0, + id = '123', name, role = 'assistant', }: { name: string; + id?: string; index?: number; role?: NonNullable; }, @@ -172,10 +177,17 @@ function* functionCallDeltas( finish_reason: i === deltas.length - 1 ? 'function_call' : null, delta: { role, - function_call: { - arguments: `${deltas[i] || ''}${i === deltas.length - 1 ? '' : ' '}`, - ...(i === deltas.length - 1 ? { name } : null), - }, + tool_calls: [ + { + type: 'function', + index: 0, + id, + function: { + arguments: `${deltas[i] || ''}${i === deltas.length - 1 ? '' : ' '}`, + ...(i === deltas.length - 1 ? { name } : null), + }, + }, + ], }, }; } @@ -215,7 +227,7 @@ class RunnerListener { .on('finalFunctionCallResult', (result) => (this.finalFunctionCallResult = result)) .on('totalUsage', (usage) => (this.totalUsage = usage)) .on('error', (error) => (this.error = error)) - .on('abort', () => (this.gotAbort = true)) + .on('abort', (error) => ((this.error = error), (this.gotAbort = true))) .on('end', () => (this.gotEnd = true)) .once('message', () => this.onceMessageCallCount++); } @@ -262,7 +274,7 @@ class RunnerListener { .map((m) => m.content as string) .filter(Boolean); expect(this.contents).toEqual(expectedContents); - expect(this.finalMessage).toEqual(this.messages[this.messages.length - 1]); + expect(this.finalMessage).toEqual([...this.messages].reverse().find((x) => x.role === 'assistant')); expect(await this.runner.finalMessage()).toEqual(this.finalMessage); expect(this.finalContent).toEqual(expectedContents[expectedContents.length - 1] ?? null); expect(await this.runner.finalContent()).toEqual(this.finalContent); @@ -329,6 +341,7 @@ class StreamingRunnerListener { .on('finalFunctionCall', (functionCall) => (this.finalFunctionCall = functionCall)) .on('finalFunctionCallResult', (result) => (this.finalFunctionCallResult = result)) .on('error', (error) => (this.error = error)) + .on('abort', (abort) => (this.error = abort)) .on('end', () => (this.gotEnd = true)); } @@ -365,7 +378,7 @@ class StreamingRunnerListener { if (error) return; if (this.eventContents.length) expect(this.eventChunks.length).toBeGreaterThan(0); - expect(this.finalMessage).toEqual(this.eventMessages[this.eventMessages.length - 1]); + expect(this.finalMessage).toEqual([...this.eventMessages].reverse().find((x) => x.role === 'assistant')); expect(await this.runner.finalMessage()).toEqual(this.finalMessage); expect(this.finalContent).toEqual(this.eventContents[this.eventContents.length - 1]?.[1] ?? 
null); expect(await this.runner.finalContent()).toEqual(this.finalContent); @@ -393,45 +406,54 @@ class StreamingRunnerListener { function _typeTests() { const openai = new OpenAI(); - openai.beta.chat.completions.runFunctions({ + openai.beta.chat.completions.runTools({ messages: [ { role: 'user', content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}' }, ], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - name: 'numProperties', - function: (obj: object) => String(Object.keys(obj).length), - parameters: { type: 'object' }, - parse: (str: string): object => { - const result = JSON.parse(str); - if (!(result instanceof Object) || Array.isArray(result)) { - throw new Error('must be an object'); - } - return result; + type: 'function', + function: { + name: 'numProperties', + function: (obj: object) => String(Object.keys(obj).length), + parameters: { type: 'object' }, + parse: (str: string): object => { + const result = JSON.parse(str); + if (!(result instanceof Object) || Array.isArray(result)) { + throw new Error('must be an object'); + } + return result; + }, + description: 'gets the number of properties on an object', }, - description: 'gets the number of properties on an object', }, { - function: (str: string) => String(str.length), - parameters: { type: 'string' }, - description: 'gets the length of a string', + type: 'function', + function: { + function: (str: string) => String(str.length), + parameters: { type: 'string' }, + description: 'gets the length of a string', + }, }, - // @ts-expect-error function must accept string if parse is omitted { - function: (obj: object) => String(Object.keys(obj).length), - parameters: { type: 'object' }, - description: 'gets the number of properties on an object', + type: 'function', + // @ts-expect-error function must accept string if parse is omitted + function: { + function: (obj: object) => String(Object.keys(obj).length), + parameters: { type: 'object' }, + description: 'gets the number of properties on an object', + }, }, ], }); - openai.beta.chat.completions.runFunctions({ + openai.beta.chat.completions.runTools({ messages: [ { role: 'user', content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}' }, ], model: 'gpt-3.5-turbo', - functions: [ - new ParsingFunction({ + tools: [ + new ParsingToolFunction({ name: 'numProperties', // @ts-expect-error parse and function don't match parse: (str: string) => str, @@ -441,13 +463,13 @@ function _typeTests() { }), ], }); - openai.beta.chat.completions.runFunctions({ + openai.beta.chat.completions.runTools({ messages: [ { role: 'user', content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}' }, ], model: 'gpt-3.5-turbo', - functions: [ - new ParsingFunction({ + tools: [ + new ParsingToolFunction({ name: 'numProperties', parse: (str: string): object => { const result = JSON.parse(str); @@ -460,7 +482,7 @@ function _typeTests() { parameters: { type: 'object' }, description: 'gets the number of properties on an object', }), - new ParsingFunction({ + new ParsingToolFunction({ name: 'keys', parse: (str: string): object => { const result = JSON.parse(str); @@ -473,7 +495,7 @@ function _typeTests() { parameters: { type: 'object' }, description: 'gets the number of properties on an object', }), - new ParsingFunction({ + new ParsingToolFunction({ name: 'len2', // @ts-expect-error parse and function don't match parse: (str: string) => str, @@ -483,140 +505,177 @@ function _typeTests() { }), ], }); - openai.beta.chat.completions.runFunctions({ 
+ openai.beta.chat.completions.runTools({ messages: [ { role: 'user', content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}' }, ], model: 'gpt-3.5-turbo', // @ts-ignore error occurs here in TS 4 - functions: [ + tools: [ { - name: 'numProperties', - parse: (str: string): object => { - const result = JSON.parse(str); - if (!(result instanceof Object) || Array.isArray(result)) { - throw new Error('must be an object'); - } - return result; + type: 'function', + function: { + name: 'numProperties', + parse: (str: string): object => { + const result = JSON.parse(str); + if (!(result instanceof Object) || Array.isArray(result)) { + throw new Error('must be an object'); + } + return result; + }, + function: (obj: object) => String(Object.keys(obj).length), + parameters: { type: 'object' }, + description: 'gets the number of properties on an object', }, - function: (obj: object) => String(Object.keys(obj).length), - parameters: { type: 'object' }, - description: 'gets the number of properties on an object', }, { - name: 'keys', - parse: (str: string): object => { - const result = JSON.parse(str); - if (!(result instanceof Object)) { - throw new Error('must be an Object'); - } - return result; + type: 'function', + function: { + name: 'keys', + parse: (str: string): object => { + const result = JSON.parse(str); + if (!(result instanceof Object)) { + throw new Error('must be an Object'); + } + return result; + }, + function: (obj: object) => Object.keys(obj).join(', '), + parameters: { type: 'object' }, + description: 'gets the number of properties on an object', }, - function: (obj: object) => Object.keys(obj).join(', '), - parameters: { type: 'object' }, - description: 'gets the number of properties on an object', }, { - name: 'len2', - parse: (str: string) => str, - // @ts-ignore error occurs here in TS 5 - // function input doesn't match parse output - function: (obj: object) => String(Object.keys(obj).length), - parameters: { type: 'object' }, - description: 'gets the number of properties on an object', + type: 'function', + function: { + name: 'len2', + parse: (str: string) => str, + // @ts-ignore error occurs here in TS 5 + // function input doesn't match parse output + function: (obj: object) => String(Object.keys(obj).length), + parameters: { type: 'object' }, + description: 'gets the number of properties on an object', + }, }, ] as const, }); } describe('resource completions', () => { - // TODO: re-enable - describe.skip('runFunctions with stream: false', () => { + describe('runTools with stream: false', () => { test('successful flow', async () => { const { fetch, handleRequest } = mockChatCompletionFetch(); const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }); const listener = new RunnerListener(runner); - await Promise.all([ - handleRequest(async (request) => { - expect(request.messages).toEqual([{ role: 'user', content: 'tell me what the weather is like' }]); - return { - id: '1', - choices: [ - { - index: 
0, - finish_reason: 'function_call', - message: { - role: 'assistant', - content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, - }, - }, - ], - created: Math.floor(Date.now() / 1000), - model: 'gpt-3.5-turbo', - object: 'chat.completion', - }; - }), - handleRequest(async (request) => { - expect(request.messages).toEqual([ - { role: 'user', content: 'tell me what the weather is like' }, + await handleRequest(async (request) => { + expect(request.messages).toEqual([{ role: 'user', content: 'tell me what the weather is like' }]); + return { + id: '1', + choices: [ { - role: 'assistant', - content: null, - function_call: { - arguments: '', - name: 'getWeather', + index: 0, + finish_reason: 'function_call', + message: { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, - { - role: 'function', - content: `it's raining`, - name: 'getWeather', - }, - ]); - return { - id: '2', - choices: [ + ], + created: Math.floor(Date.now() / 1000), + model: 'gpt-3.5-turbo', + object: 'chat.completion', + }; + }); + + await handleRequest(async (request) => { + expect(request.messages).toEqual([ + { role: 'user', content: 'tell me what the weather is like' }, + { + role: 'assistant', + content: null, + tool_calls: [ { - index: 0, - finish_reason: 'stop', - message: { - role: 'assistant', - content: `it's raining`, + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', }, }, ], - created: Math.floor(Date.now() / 1000), - model: 'gpt-3.5-turbo', - object: 'chat.completion', - }; - }), - runner.done(), - ]); + }, + { + role: 'tool', + content: `it's raining`, + tool_call_id: '123', + }, + ]); + + return { + id: '2', + choices: [ + { + index: 0, + finish_reason: 'stop', + message: { + role: 'assistant', + content: `it's raining`, + }, + }, + ], + created: Math.floor(Date.now() / 1000), + model: 'gpt-3.5-turbo', + object: 'chat.completion', + }; + }); + + await runner.done(); expect(listener.messages).toEqual([ { role: 'user', content: 'tell me what the weather is like' }, - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, + { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], + }, + { role: 'tool', content: `it's raining`, tool_call_id: '123' }, { role: 'assistant', content: "it's raining" }, ]); expect(listener.functionCallResults).toEqual([`it's raining`]); @@ -628,17 +687,20 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); const controller = new AbortController(); - const runner = openai.beta.chat.completions.runFunctions( + const runner = openai.beta.chat.completions.runTools( { messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }, @@ -657,10 +719,16 @@ describe('resource completions', () => { message: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', 
- }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -676,8 +744,21 @@ describe('resource completions', () => { expect(listener.messages).toEqual([ { role: 'user', content: 'tell me what the weather is like' }, - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, + { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], + }, + { role: 'tool', content: `it's raining`, tool_call_id: '123' }, ]); expect(listener.functionCallResults).toEqual([`it's raining`]); await listener.sanityCheck({ error: 'Request was aborted.' }); @@ -688,7 +769,7 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ messages: [ { role: 'user', @@ -696,8 +777,8 @@ describe('resource completions', () => { }, ], model: 'gpt-3.5-turbo', - functions: [ - new ParsingFunction({ + tools: [ + new ParsingToolFunction({ name: 'numProperties', function: (obj: object) => String(Object.keys(obj).length), parameters: { type: 'object' }, @@ -714,86 +795,96 @@ describe('resource completions', () => { }); const listener = new RunnerListener(runner); - await Promise.all([ - handleRequest(async (request) => { - expect(request.messages).toEqual([ + await handleRequest(async (request) => { + expect(request.messages).toEqual([ + { + role: 'user', + content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}', + }, + ]); + return { + id: '1', + choices: [ { - role: 'user', - content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}', - }, - ]); - return { - id: '1', - choices: [ - { - index: 0, - finish_reason: 'function_call', - message: { - role: 'assistant', - content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', + index: 0, + finish_reason: 'function_call', + message: { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', + }, }, - }, + ], }, - ], - created: Math.floor(Date.now() / 1000), - model: 'gpt-3.5-turbo', - object: 'chat.completion', - usage: { - completion_tokens: 5, - prompt_tokens: 20, - total_tokens: 25, }, - }; - }), + ], + created: Math.floor(Date.now() / 1000), + model: 'gpt-3.5-turbo', + object: 'chat.completion', + usage: { + completion_tokens: 5, + prompt_tokens: 20, + total_tokens: 25, + }, + }; + }); - handleRequest(async (request) => { - expect(request.messages).toEqual([ - { - role: 'user', - content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}', - }, - { - role: 'assistant', - content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', - }, - }, - { - role: 'function', - content: '3', - name: 'numProperties', - }, - ]); - return { - id: '2', - choices: [ + await handleRequest(async (request) => { + expect(request.messages).toEqual([ + { + role: 'user', + content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}', + }, + { + role: 'assistant', + content: null, + tool_calls: [ { - index: 0, - finish_reason: 'stop', - message: { - role: 
'assistant', - content: `there are 3 properties in {"a": 1, "b": 2, "c": 3}`, + type: 'function', + id: '123', + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', }, }, ], - created: Math.floor(Date.now() / 1000), - model: 'gpt-3.5-turbo', - object: 'chat.completion', - usage: { - completion_tokens: 10, - prompt_tokens: 25, - total_tokens: 35, + }, + { + role: 'tool', + content: '3', + tool_call_id: '123', + }, + ]); + return { + id: '2', + choices: [ + { + index: 0, + finish_reason: 'stop', + message: { + role: 'assistant', + content: `there are 3 properties in {"a": 1, "b": 2, "c": 3}`, + }, }, - }; - }), + ], + created: Math.floor(Date.now() / 1000), + model: 'gpt-3.5-turbo', + object: 'chat.completion', + usage: { + completion_tokens: 10, + prompt_tokens: 25, + total_tokens: 35, + }, + }; + }); - runner.done(), - ]); + await runner.done(); expect(listener.messages).toEqual([ { @@ -803,9 +894,15 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + }, + ], }, - { role: 'function', content: '3', name: 'numProperties' }, + { role: 'tool', content: '3', tool_call_id: '123' }, { role: 'assistant', content: 'there are 3 properties in {"a": 1, "b": 2, "c": 3}' }, ]); expect(listener.functionCallResults).toEqual(['3']); @@ -816,7 +913,7 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ messages: [ { role: 'user', @@ -824,8 +921,8 @@ describe('resource completions', () => { }, ], model: 'gpt-3.5-turbo', - functions: [ - new ParsingFunction({ + tools: [ + new ParsingToolFunction({ name: 'numProperties', function: (obj: object) => String(Object.keys(obj).length), parameters: { type: 'object' }, @@ -859,10 +956,16 @@ describe('resource completions', () => { message: { role: 'assistant', content: null, - function_call: { - arguments: '[{"a": 1, "b": 2, "c": 3}]', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '[{"a": 1, "b": 2, "c": 3}]', + name: 'numProperties', + }, + }, + ], }, }, ], @@ -880,15 +983,21 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '[{"a": 1, "b": 2, "c": 3}]', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '[{"a": 1, "b": 2, "c": 3}]', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `must be an object`, - name: 'numProperties', + tool_call_id: '123', }, ]); return { @@ -900,10 +1009,16 @@ describe('resource completions', () => { message: { role: 'assistant', content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', + }, + }, + ], }, }, ], @@ -921,28 +1036,40 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '[{"a": 1, "b": 2, "c": 3}]', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + 
arguments: '[{"a": 1, "b": 2, "c": 3}]', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `must be an object`, - name: 'numProperties', + tool_call_id: '123', }, { role: 'assistant', content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: '3', - name: 'numProperties', + tool_call_id: '1234', }, ]); return { @@ -973,15 +1100,27 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { name: 'numProperties', arguments: '[{"a": 1, "b": 2, "c": 3}]' }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { name: 'numProperties', arguments: '[{"a": 1, "b": 2, "c": 3}]' }, + }, + ], }, - { role: 'function', content: `must be an object`, name: 'numProperties' }, + { role: 'tool', content: `must be an object`, tool_call_id: '123' }, { role: 'assistant', content: null, - function_call: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + }, + ], }, - { role: 'function', content: '3', name: 'numProperties' }, + { role: 'tool', content: '3', tool_call_id: '1234' }, { role: 'assistant', content: 'there are 3 properties in {"a": 1, "b": 2, "c": 3}' }, ]); expect(listener.functionCallResults).toEqual([`must be an object`, '3']); @@ -992,19 +1131,25 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - function_call: { - name: 'getWeather', + tool_choice: { + type: 'function', + function: { + name: 'getWeather', + }, }, - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }); @@ -1022,10 +1167,16 @@ describe('resource completions', () => { message: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -1039,8 +1190,21 @@ describe('resource completions', () => { expect(listener.messages).toEqual([ { role: 'user', content: 'tell me what the weather is like' }, - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, + { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], + }, + { role: 'tool', content: `it's raining`, tool_call_id: '123' }, ]); expect(listener.functionCallResults).toEqual([`it's raining`]); await listener.sanityCheck(); @@ -1050,16 +1214,19 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - 
const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }); @@ -1077,10 +1244,16 @@ describe('resource completions', () => { message: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, }, ], @@ -1095,15 +1268,21 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'tool', + content: `Invalid tool_call: "get_weather". Available options are: "getWeather". Please try again`, + tool_call_id: '123', }, ]); return { @@ -1115,10 +1294,16 @@ describe('resource completions', () => { message: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -1133,28 +1318,40 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'tool', + content: `Invalid tool_call: "get_weather". Available options are: "getWeather". Please try again`, + tool_call_id: '123', }, { role: 'assistant', - content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + content: null, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `it's raining`, - name: 'getWeather', + tool_call_id: '1234', }, ]); return { @@ -1179,106 +1376,52 @@ describe('resource completions', () => { expect(listener.messages).toEqual([ { role: 'user', content: 'tell me what the weather is like' }, - { role: 'assistant', content: null, function_call: { name: 'get_weather', arguments: '' } }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'assistant', + content: null, + tool_calls: [{ type: 'function', id: '123', function: { name: 'get_weather', arguments: '' } }], }, - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, - { role: 'assistant', content: "it's raining" }, - ]); - expect(listener.functionCallResults).toEqual([ - `Invalid function_call: "get_weather". 
Available options are: "getWeather". Please try again`, - `it's raining`, - ]); - await listener.sanityCheck(); - }); - test('wrong function name with single function call', async () => { - const { fetch, handleRequest } = mockChatCompletionFetch(); - - const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - - const runner = openai.beta.chat.completions.runFunctions({ - messages: [{ role: 'user', content: 'tell me what the weather is like' }], - model: 'gpt-3.5-turbo', - function_call: { - name: 'getWeather', + { + role: 'tool', + content: `Invalid tool_call: "get_weather". Available options are: "getWeather". Please try again`, + tool_call_id: '123', }, - functions: [ - { - function: function getWeather() { - return `it's raining`; - }, - parameters: {}, - description: 'gets the weather', - }, - ], - }); - const listener = new RunnerListener(runner); - - await Promise.all([ - handleRequest(async (request) => { - expect(request.messages).toEqual([{ role: 'user', content: 'tell me what the weather is like' }]); - return { - id: '1', - choices: [ - { - index: 0, - finish_reason: 'function_call', - message: { - role: 'assistant', - content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, - }, - }, - ], - created: Math.floor(Date.now() / 1000), - model: 'gpt-3.5-turbo', - object: 'chat.completion', - }; - }), - runner.done(), - ]); - - expect(listener.messages).toEqual([ - { role: 'user', content: 'tell me what the weather is like' }, - { role: 'assistant', content: null, function_call: { name: 'get_weather', arguments: '' } }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'assistant', + content: null, + tool_calls: [{ type: 'function', id: '1234', function: { name: 'getWeather', arguments: '' } }], }, + { role: 'tool', content: `it's raining`, tool_call_id: '1234' }, + { role: 'assistant', content: "it's raining" }, ]); expect(listener.functionCallResults).toEqual([ - `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, + `Invalid tool_call: "get_weather". Available options are: "getWeather". 
Please try again`, + `it's raining`, ]); await listener.sanityCheck(); }); }); - // TODO: re-enable - describe.skip('runFunctions with stream: true', () => { + describe('runTools with stream: true', () => { test('successful flow', async () => { const { fetch, handleRequest } = mockStreamingChatCompletionFetch(); const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ stream: true, messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }); @@ -1296,10 +1439,17 @@ describe('resource completions', () => { delta: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + index: 0, + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -1314,15 +1464,21 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `it's raining`, - name: 'getWeather', + tool_call_id: '123', }, ]); for (const choice of contentChoiceDeltas(`it's raining`)) { @@ -1339,8 +1495,21 @@ describe('resource completions', () => { ]); expect(listener.eventMessages).toEqual([ - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, + { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], + }, + { role: 'tool', content: `it's raining`, tool_call_id: '123' }, { role: 'assistant', content: "it's raining" }, ]); expect(listener.eventFunctionCallResults).toEqual([`it's raining`]); @@ -1352,18 +1521,21 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); const controller = new AbortController(); - const runner = openai.beta.chat.completions.runFunctions( + const runner = openai.beta.chat.completions.runTools( { stream: true, messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }, @@ -1383,10 +1555,17 @@ describe('resource completions', () => { delta: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + index: 0, + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -1399,8 +1578,21 @@ describe('resource completions', () => { await runner.done().catch(() => {}); expect(listener.eventMessages).toEqual([ - { 
role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, + { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], + }, + { role: 'tool', content: `it's raining`, tool_call_id: '123' }, ]); expect(listener.eventFunctionCallResults).toEqual([`it's raining`]); await listener.sanityCheck({ error: 'Request was aborted.' }); @@ -1411,7 +1603,7 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ stream: true, messages: [ { @@ -1420,8 +1612,8 @@ describe('resource completions', () => { }, ], model: 'gpt-3.5-turbo', - functions: [ - new ParsingFunction({ + tools: [ + new ParsingToolFunction({ name: 'numProperties', function: (obj: object) => String(Object.keys(obj).length), parameters: { type: 'object' }, @@ -1455,10 +1647,17 @@ describe('resource completions', () => { delta: { role: 'assistant', content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + index: 0, + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', + }, + }, + ], }, }, ], @@ -1476,15 +1675,21 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: '3', - name: 'numProperties', + tool_call_id: '123', }, ]); for (const choice of contentChoiceDeltas(`there are 3 properties in {"a": 1, "b": 2, "c": 3}`)) { @@ -1504,9 +1709,15 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + }, + ], }, - { role: 'function', content: '3', name: 'numProperties' }, + { role: 'tool', content: '3', tool_call_id: '123' }, { role: 'assistant', content: 'there are 3 properties in {"a": 1, "b": 2, "c": 3}' }, ]); expect(listener.eventFunctionCallResults).toEqual(['3']); @@ -1517,7 +1728,7 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ stream: true, messages: [ { @@ -1526,8 +1737,8 @@ describe('resource completions', () => { }, ], model: 'gpt-3.5-turbo', - functions: [ - new ParsingFunction({ + tools: [ + new ParsingToolFunction({ name: 'numProperties', function: (obj: object) => String(Object.keys(obj).length), parameters: { type: 'object' }, @@ -1552,7 +1763,10 @@ describe('resource completions', () => { content: 'can you tell me how many properties are in {"a": 1, "b": 2, "c": 3}', }, ]); - for (const choice of functionCallDeltas('[{"a": 1, "b": 2, "c": 3}]', { name: 'numProperties' })) { + for (const choice of functionCallDeltas('[{"a": 1, "b": 2, "c": 3}]', { + name: 
'numProperties', + id: '123', + })) { yield { id: '1', choices: [choice], @@ -1571,18 +1785,27 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '[{"a": 1, "b": 2, "c": 3}]', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '[{"a": 1, "b": 2, "c": 3}]', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `must be an object`, - name: 'numProperties', + tool_call_id: '123', }, ]); - for (const choice of functionCallDeltas('{"a": 1, "b": 2, "c": 3}', { name: 'numProperties' })) { + for (const choice of functionCallDeltas('{"a": 1, "b": 2, "c": 3}', { + name: 'numProperties', + id: '1234', + })) { yield { id: '2', choices: [choice], @@ -1601,28 +1824,40 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '[{"a": 1, "b": 2, "c": 3}]', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '[{"a": 1, "b": 2, "c": 3}]', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `must be an object`, - name: 'numProperties', + tool_call_id: '123', }, { role: 'assistant', content: null, - function_call: { - arguments: '{"a": 1, "b": 2, "c": 3}', - name: 'numProperties', - }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '{"a": 1, "b": 2, "c": 3}', + name: 'numProperties', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: '3', - name: 'numProperties', + tool_call_id: '1234', }, ]); for (const choice of contentChoiceDeltas(`there are 3 properties in {"a": 1, "b": 2, "c": 3}`)) { @@ -1642,15 +1877,27 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { name: 'numProperties', arguments: '[{"a": 1, "b": 2, "c": 3}]' }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { name: 'numProperties', arguments: '[{"a": 1, "b": 2, "c": 3}]' }, + }, + ], }, - { role: 'function', content: `must be an object`, name: 'numProperties' }, + { role: 'tool', content: `must be an object`, tool_call_id: '123' }, { role: 'assistant', content: null, - function_call: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { name: 'numProperties', arguments: '{"a": 1, "b": 2, "c": 3}' }, + }, + ], }, - { role: 'function', content: '3', name: 'numProperties' }, + { role: 'tool', content: '3', tool_call_id: '1234' }, { role: 'assistant', content: 'there are 3 properties in {"a": 1, "b": 2, "c": 3}' }, ]); expect(listener.eventFunctionCallResults).toEqual([`must be an object`, '3']); @@ -1661,20 +1908,26 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ stream: true, messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - function_call: { - name: 'getWeather', + tool_choice: { + type: 'function', + function: { + name: 'getWeather', + }, }, - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, 
- description: 'gets the weather', }, ], }); @@ -1692,10 +1945,17 @@ describe('resource completions', () => { delta: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + index: 0, + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -1708,8 +1968,21 @@ describe('resource completions', () => { ]); expect(listener.eventMessages).toEqual([ - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, + { + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], + }, + { role: 'tool', tool_call_id: '123', content: `it's raining` }, ]); expect(listener.eventFunctionCallResults).toEqual([`it's raining`]); await listener.sanityCheck(); @@ -1719,17 +1992,20 @@ describe('resource completions', () => { const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - const runner = openai.beta.chat.completions.runFunctions({ + const runner = openai.beta.chat.completions.runTools({ stream: true, messages: [{ role: 'user', content: 'tell me what the weather is like' }], model: 'gpt-3.5-turbo', - functions: [ + tools: [ { - function: function getWeather() { - return `it's raining`; + type: 'function', + function: { + function: function getWeather() { + return `it's raining`; + }, + parameters: {}, + description: 'gets the weather', }, - parameters: {}, - description: 'gets the weather', }, ], }); @@ -1747,10 +2023,17 @@ describe('resource completions', () => { delta: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, + tool_calls: [ + { + type: 'function', + index: 0, + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, }, ], @@ -1765,15 +2048,21 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'tool', + content: `Invalid tool_call: "get_weather". Available options are: "getWeather". Please try again`, + tool_call_id: '123', }, ]); yield { @@ -1785,10 +2074,17 @@ describe('resource completions', () => { delta: { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + index: 0, + id: '1234', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, }, ], @@ -1803,28 +2099,40 @@ describe('resource completions', () => { { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'tool', + content: `Invalid tool_call: "get_weather". Available options are: "getWeather". 
Please try again`, + tool_call_id: '123', }, { role: 'assistant', content: null, - function_call: { - arguments: '', - name: 'getWeather', - }, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, { - role: 'function', + role: 'tool', content: `it's raining`, - name: 'getWeather', + tool_call_id: '1234', }, ]); for (const choice of contentChoiceDeltas(`it's raining`)) { @@ -1841,83 +2149,45 @@ describe('resource completions', () => { ]); expect(listener.eventMessages).toEqual([ - { role: 'assistant', content: null, function_call: { name: 'get_weather', arguments: '' } }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '123', + function: { + arguments: '', + name: 'get_weather', + }, + }, + ], }, - { role: 'assistant', content: null, function_call: { name: 'getWeather', arguments: '' } }, - { role: 'function', content: `it's raining`, name: 'getWeather' }, - { role: 'assistant', content: "it's raining" }, - ]); - expect(listener.eventFunctionCallResults).toEqual([ - `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - `it's raining`, - ]); - await listener.sanityCheck(); - }); - test('wrong function name with single function call', async () => { - const { fetch, handleRequest } = mockStreamingChatCompletionFetch(); - - const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010', fetch }); - - const runner = openai.beta.chat.completions.runFunctions({ - stream: true, - messages: [{ role: 'user', content: 'tell me what the weather is like' }], - model: 'gpt-3.5-turbo', - function_call: { - name: 'getWeather', + { + role: 'tool', + content: `Invalid tool_call: "get_weather". Available options are: "getWeather". Please try again`, + tool_call_id: '123', }, - functions: [ - { - function: function getWeather() { - return `it's raining`; - }, - parameters: {}, - description: 'gets the weather', - }, - ], - }); - const listener = new StreamingRunnerListener(runner); - - await Promise.all([ - handleRequest(async function* (request): AsyncIterable { - expect(request.messages).toEqual([{ role: 'user', content: 'tell me what the weather is like' }]); - yield { - id: '1', - choices: [ - { - index: 0, - finish_reason: 'function_call', - delta: { - role: 'assistant', - content: null, - function_call: { - arguments: '', - name: 'get_weather', - }, - }, - }, - ], - created: Math.floor(Date.now() / 1000), - model: 'gpt-3.5-turbo', - object: 'chat.completion.chunk', - }; - }), - runner.done(), - ]); - - expect(listener.eventMessages).toEqual([ - { role: 'assistant', content: null, function_call: { name: 'get_weather', arguments: '' } }, { - role: 'function', - content: `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, - name: 'get_weather', + role: 'assistant', + content: null, + tool_calls: [ + { + type: 'function', + id: '1234', + function: { + arguments: '', + name: 'getWeather', + }, + }, + ], }, + { role: 'tool', content: `it's raining`, tool_call_id: '1234' }, + { role: 'assistant', content: "it's raining" }, ]); expect(listener.eventFunctionCallResults).toEqual([ - `Invalid function_call: "get_weather". Available options are: "getWeather". Please try again`, + `Invalid tool_call: "get_weather". Available options are: "getWeather". 
Please try again`, + `it's raining`, ]); await listener.sanityCheck(); }); diff --git a/src/lib/ChatCompletionRunner.ts b/src/lib/ChatCompletionRunner.ts index 4a7ca18a6..a110f0192 100644 --- a/src/lib/ChatCompletionRunner.ts +++ b/src/lib/ChatCompletionRunner.ts @@ -30,13 +30,18 @@ export type ChatCompletionToolRunnerParams { + /** @deprecated - please use `runTools` instead. */ static runFunctions( completions: Completions, params: ChatCompletionFunctionRunnerParams, options?: RunnerOptions, ): ChatCompletionRunner { const runner = new ChatCompletionRunner(); - runner._run(() => runner._runFunctions(completions, params, options)); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(completions, params, opts)); return runner; } @@ -46,7 +51,11 @@ export class ChatCompletionRunner extends AbstractChatCompletionRunner runner._runTools(completions, params, options)); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(completions, params, opts)); return runner; } diff --git a/src/lib/ChatCompletionStreamingRunner.ts b/src/lib/ChatCompletionStreamingRunner.ts index a2da456e6..cf58c5270 100644 --- a/src/lib/ChatCompletionStreamingRunner.ts +++ b/src/lib/ChatCompletionStreamingRunner.ts @@ -37,18 +37,18 @@ export class ChatCompletionStreamingRunner return runner; } + /** @deprecated - please use `runTools` instead. */ static runFunctions( completions: Completions, params: ChatCompletionStreamingFunctionRunnerParams, options?: RunnerOptions, ): ChatCompletionStreamingRunner { const runner = new ChatCompletionStreamingRunner(); - runner._run(() => - runner._runFunctions(completions, params, { - ...options, - headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, - }), - ); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(completions, params, opts)); return runner; } @@ -58,12 +58,11 @@ export class ChatCompletionStreamingRunner options?: RunnerOptions, ): ChatCompletionStreamingRunner { const runner = new ChatCompletionStreamingRunner(); - runner._run(() => - runner._runTools(completions, params, { - ...options, - headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, - }), - ); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(completions, params, opts)); return runner; } } diff --git a/src/lib/RunnableFunction.ts b/src/lib/RunnableFunction.ts index 5c6845cab..96ca06c86 100644 --- a/src/lib/RunnableFunction.ts +++ b/src/lib/RunnableFunction.ts @@ -61,9 +61,18 @@ export type RunnableFunction = : Args extends object ? RunnableFunctionWithParse : never; -export type RunnableToolFunction = { +export type RunnableToolFunction = + Args extends string ? RunnableToolFunctionWithoutParse + : Args extends object ? 
RunnableToolFunctionWithParse + : never; + +export type RunnableToolFunctionWithoutParse = { + type: 'function'; + function: RunnableFunctionWithoutParse; +}; +export type RunnableToolFunctionWithParse = { type: 'function'; - function: RunnableFunction; + function: RunnableFunctionWithParse; }; export function isRunnableFunctionWithParse( @@ -91,8 +100,16 @@ export type RunnableTools = /** * This is helper class for passing a `function` and `parse` where the `function` * argument type matches the `parse` return type. + * + * @deprecated - please use ParsingToolFunction instead. */ export class ParsingFunction { + function: RunnableFunctionWithParse['function']; + parse: RunnableFunctionWithParse['parse']; + parameters: RunnableFunctionWithParse['parameters']; + description: RunnableFunctionWithParse['description']; + name?: RunnableFunctionWithParse['name']; + constructor(input: RunnableFunctionWithParse) { this.function = input.function; this.parse = input.parse; @@ -100,9 +117,18 @@ export class ParsingFunction { this.description = input.description; this.name = input.name; } - function: RunnableFunctionWithParse['function']; - parse: RunnableFunctionWithParse['parse']; - parameters: RunnableFunctionWithParse['parameters']; - description: RunnableFunctionWithParse['description']; - name?: RunnableFunctionWithParse['name']; +} + +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. + */ +export class ParsingToolFunction { + type: 'function'; + function: RunnableFunctionWithParse; + + constructor(input: RunnableFunctionWithParse) { + this.type = 'function'; + this.function = input; + } } diff --git a/src/resources/beta/chat/completions.ts b/src/resources/beta/chat/completions.ts index f4904acb7..e7f89f5cf 100644 --- a/src/resources/beta/chat/completions.ts +++ b/src/resources/beta/chat/completions.ts @@ -19,6 +19,7 @@ export { RunnableFunctionWithParse, RunnableFunctionWithoutParse, ParsingFunction, + ParsingToolFunction, } from 'openai/lib/RunnableFunction'; import { ChatCompletionToolRunnerParams } from 'openai/lib/ChatCompletionRunner'; export { ChatCompletionToolRunnerParams } from 'openai/lib/ChatCompletionRunner'; @@ -29,13 +30,7 @@ export { ChatCompletionStream, type ChatCompletionStreamParams } from 'openai/li export class Completions extends APIResource { /** - * A convenience helper for using function calls with the /chat/completions - * endpoint which automatically calls the JavaScript functions you provide and - * sends their results back to the /chat/completions endpoint, looping as long as - * the model requests function calls. - * - * For more details and examples, see - * [the docs](https://github.com/openai/openai-node#automated-function-calls) + * @deprecated - use `runTools` instead. 
*/ runFunctions( body: ChatCompletionFunctionRunnerParams, From 364bea434e063cd9e132cef09ac7f3bbdde3e30a Mon Sep 17 00:00:00 2001 From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com> Date: Mon, 11 Dec 2023 23:01:12 +0000 Subject: [PATCH 8/8] release: 4.21.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 24 ++++++++++++++++++++++++ README.md | 2 +- build-deno | 2 +- package.json | 2 +- src/version.ts | 2 +- 6 files changed, 29 insertions(+), 5 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 1c21a9343..4bb7e5af3 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "4.20.1" + ".": "4.21.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 331e2bd37..77ab63845 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## 4.21.0 (2023-12-11) + +Full Changelog: [v4.20.1...v4.21.0](https://github.com/openai/openai-node/compare/v4.20.1...v4.21.0) + +### Features + +* **client:** support reading the base url from an env variable ([#547](https://github.com/openai/openai-node/issues/547)) ([06fb68d](https://github.com/openai/openai-node/commit/06fb68de1ff80983e349b6715d1037e2072c8dd4)) + + +### Bug Fixes + +* correct some runTools behavior and deprecate runFunctions ([#562](https://github.com/openai/openai-node/issues/562)) ([f5cdd0f](https://github.com/openai/openai-node/commit/f5cdd0f704d3d075cdfc5bc2df1f7a8bae5cd9f1)) +* prevent 400 when using runTools/runFunctions with Azure OpenAI API ([#544](https://github.com/openai/openai-node/issues/544)) ([735d9b8](https://github.com/openai/openai-node/commit/735d9b86acdc067e1ee6ebe1ea50de2955431050)) + + +### Documentation + +* **readme:** update example snippets ([#546](https://github.com/openai/openai-node/issues/546)) ([566d290](https://github.com/openai/openai-node/commit/566d290006920f536788bb77f4d24a6906e2971f)) + + +### Build System + +* specify `packageManager: yarn` ([#561](https://github.com/openai/openai-node/issues/561)) ([935b898](https://github.com/openai/openai-node/commit/935b8983c74f7b03b67d22f4d194989838f963f3)) + ## 4.20.1 (2023-11-24) Full Changelog: [v4.20.0...v4.20.1](https://github.com/openai/openai-node/compare/v4.20.0...v4.20.1) diff --git a/README.md b/README.md index 2a23c32de..a52e2f884 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ You can import in Deno via: ```ts -import OpenAI from 'https://deno.land/x/openai@v4.20.1/mod.ts'; +import OpenAI from 'https://deno.land/x/openai@v4.21.0/mod.ts'; ``` diff --git a/build-deno b/build-deno index 610d47c27..2f788d175 100755 --- a/build-deno +++ b/build-deno @@ -14,7 +14,7 @@ This is a build produced from https://github.com/openai/openai-node – please g Usage: \`\`\`ts -import OpenAI from "https://deno.land/x/openai@v4.20.1/mod.ts"; +import OpenAI from "https://deno.land/x/openai@v4.21.0/mod.ts"; const client = new OpenAI(); \`\`\` diff --git a/package.json b/package.json index 0d66b7365..8b62a945c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "openai", - "version": "4.20.1", + "version": "4.21.0", "description": "The official TypeScript library for the OpenAI API", "author": "OpenAI ", "types": "dist/index.d.ts", diff --git a/src/version.ts b/src/version.ts index fb0af904c..ae187947c 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const VERSION = '4.20.1'; // x-release-please-version +export const VERSION = '4.21.0'; // x-release-please-version
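
---

For readers migrating off the now-deprecated `runFunctions` helper, below is a minimal sketch of the `runTools` + `ParsingToolFunction` pattern that these patches move the tests and helpers toward. The prompt, model name, and validation logic are illustrative assumptions rather than values taken from the patches, and the import path assumes the `ParsingToolFunction` re-export added to `openai/resources/beta/chat/completions` in this series.

```ts
import OpenAI from 'openai';
import { ParsingToolFunction } from 'openai/resources/beta/chat/completions';

const openai = new OpenAI();

async function main() {
  const runner = openai.beta.chat.completions.runTools({
    // Illustrative model and prompt; swap in your own values.
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'How many properties are in {"a": 1, "b": 2, "c": 3}?' }],
    tools: [
      // ParsingToolFunction pairs a `parse` step with the `function` it feeds,
      // so the tool body receives an already-validated object instead of a raw JSON string.
      new ParsingToolFunction({
        name: 'numProperties',
        description: 'counts the number of properties in an object',
        parameters: { type: 'object' },
        parse: (args: string): object => {
          const obj = JSON.parse(args);
          if (!obj || typeof obj !== 'object' || Array.isArray(obj)) {
            throw new Error('must be an object');
          }
          return obj;
        },
        function: (obj: object) => String(Object.keys(obj).length),
      }),
    ],
  });

  // `message` events fire for every message appended to the conversation,
  // including the `tool` role messages that carry each tool call's result.
  runner.on('message', (message) => console.log(message));

  await runner.done();
}

main();
```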