From e69568ebb13fc0e5a2ab8c81ea764049e73532ae Mon Sep 17 00:00:00 2001 From: Dijana Pavlovic Date: Sat, 22 Feb 2025 22:05:50 +0100 Subject: [PATCH] Rename GelAI to RAGClient (#1198) Related https://github.com/geldata/gel-python/pull/578 --- edgedb/ai/index.ts | 29 +++++++++++++++--------- packages/ai/README.md | 48 ++++++++++++++++++++-------------------- packages/ai/src/core.ts | 20 ++++++++--------- packages/ai/src/types.ts | 28 +++++++++++------------ 4 files changed, 67 insertions(+), 58 deletions(-) diff --git a/edgedb/ai/index.ts b/edgedb/ai/index.ts index 775cef95d..51b954b2a 100644 --- a/edgedb/ai/index.ts +++ b/edgedb/ai/index.ts @@ -1,15 +1,24 @@ import { - GelMessage, - GelSystemMessage, - GelUserMessage, - GelAssistantMessage, - GelToolMessage, + createRAGClient, + RAGClient, + Message, + SystemMessage, + UserMessage, + AssistantMessage, + ToolMessage, + RAGOptions, } from "@gel/ai"; export * from "@gel/ai"; -export type EdgeDBSystemMessage = GelSystemMessage; -export type EdgeDBUserMessage = GelUserMessage; -export type EdgeDBAssistantMessage = GelAssistantMessage; -export type EdgeDBToolMessage = GelToolMessage; -export type EdgeDBMessage = GelMessage; +export type EdgeDBAI = RAGClient; +export const EdgeDBAI = RAGClient; + +export const createAI = createRAGClient; + +export type EdgeDBSystemMessage = SystemMessage; +export type EdgeDBUserMessage = UserMessage; +export type EdgeDBAssistantMessage = AssistantMessage; +export type EdgeDBToolMessage = ToolMessage; +export type EdgeDBMessage = Message; +export type AIOptions = RAGOptions; diff --git a/packages/ai/README.md b/packages/ai/README.md index 810fd6d1e..56e81ceb7 100644 --- a/packages/ai/README.md +++ b/packages/ai/README.md @@ -16,26 +16,26 @@ See the AI documentation for detailed guidance on setting up the AI extension an ## API Reference -### `createAI(client: Client, options: Partial = {}): GelAI` +### `createRAGClient(client: Client, options: Partial = {}): RAGClient` -Creates an instance 
of `GelAI` with the specified client and options. +Creates an instance of `RAGClient` with the specified client and options. -- `client`: An Gel client instance. +- `client`: A Gel client instance. - `options`: Configuration options for the AI model. - `model`: Required. Specifies the AI model to use. This could be some of the OpenAI, Mistral or Anthropic models supported by Gel AI. - `prompt`: Optional. Defines the input messages for the AI model. The prompt can have an `ID` or a `name` referencing a stored prompt. The referenced prompt will supply predefined messages. Optionally, include a custom list of messages using the `custom` field. These custom messages will be concatenated with messages from the stored prompt referenced by `id` or `name`. If no `id` or `name` is specified, only the `custom` messages will be used. If no `id`, `name`, or `custom` messages are provided, the built-in system prompt will be used by default. -### `GelAI` +### `RAGClient` #### Public Methods -- `withConfig(options: Partial): GelAI` +- `withConfig(options: Partial): RAGClient` - Returns a new `GelAI` instance with updated configuration options. + Returns a new `RAGClient` instance with updated configuration options. -- `withContext(context: Partial): GelAI` +- `withContext(context: Partial): RAGClient` - Returns a new `GelAI` instance with an updated query context. + Returns a new `RAGClient` instance with an updated query context. 
- `async queryRag(message: string, context?: QueryContext): Promise` @@ -62,46 +62,46 @@ The following example demonstrates how to use the `@gel/ai` package to query an ```typescript import { createClient } from "gel"; -import { createAI } from "./src/index.js"; +import { createRAGClient } from "@gel/ai"; -const client = createClient({ +const client = createClient({ instanceName: "_localdev", database: "main", tlsSecurity: "insecure", }); -const gpt4Ai = createAI(client, { - model: "gpt-4-turbo-preview", +const gpt4Rag = createRAGClient(client, { + model: "gpt-4-turbo", }); -const astronomyAi = gpt4Ai.withContext({ query: "Astronomy" }); +const astronomyRag = gpt4Rag.withContext({ query: "Astronomy" }); console.time("gpt-4 Time"); -console.log(await astronomyAi.queryRag("What color is the sky on Mars?")); +console.log(await astronomyRag.queryRag("What color is the sky on Mars?")); console.timeEnd("gpt-4 Time"); -const fastAstronomyAi = astronomyAi.withConfig({ - model: "gpt-3.5-turbo", +const fastAstronomyRag = astronomyRag.withConfig({ + model: "gpt-4o", }); -console.time("gpt-3.5 Time"); -console.log(await fastAstronomyAi.queryRag("What color is the sky on Mars?")); -console.timeEnd("gpt-3.5 Time"); +console.time("gpt-4o Time"); +console.log(await fastAstronomyRag.queryRag("What color is the sky on Mars?")); +console.timeEnd("gpt-4o Time"); -const fastChemistryAi = fastAstronomyAi.withContext({ query: "Chemistry" }); +const fastChemistryRag = fastAstronomyRag.withContext({ query: "Chemistry" }); console.log( - await fastChemistryAi.queryRag("What is the atomic number of gold?"), + await fastChemistryRag.queryRag("What is the atomic number of gold?"), ); // handle the Response object -const response = await fastChemistryAi.streamRag( +const response = await fastChemistryRag.streamRag( "What is the atomic number of gold?", ); handleReadableStream(response); // custom function that reads the stream // handle individual chunks as they arrive -for await (const 
chunk of fastChemistryAi.streamRag( +for await (const chunk of fastChemistryRag.streamRag( "What is the atomic number of gold?", )) { console.log("chunk", chunk); @@ -109,7 +109,7 @@ for await (const chunk of fastChemistryAi.streamRag( // embeddings console.log( - await fastChemistryAi.generateEmbeddings( + await fastChemistryRag.generateEmbeddings( ["What is the atomic number of gold?"], "text-embedding-ada-002", ), diff --git a/packages/ai/src/core.ts b/packages/ai/src/core.ts index 9816623d7..703e2d5f0 100644 --- a/packages/ai/src/core.ts +++ b/packages/ai/src/core.ts @@ -7,7 +7,7 @@ import { type AuthenticatedFetch, } from "gel/dist/utils.js"; import { - type AIOptions, + type RAGOptions, type QueryContext, type StreamingMessage, type RagRequest, @@ -18,25 +18,25 @@ import { getHTTPSCRAMAuth } from "gel/dist/httpScram.js"; import { cryptoUtils } from "gel/dist/browserCrypto.js"; import { extractMessageFromParsedEvent, handleResponseError } from "./utils.js"; -export function createAI(client: Client, options: AIOptions) { - return new GelAI(client, options); +export function createRAGClient(client: Client, options: RAGOptions) { + return new RAGClient(client, options); } const httpSCRAMAuth = getHTTPSCRAMAuth(cryptoUtils); -export class GelAI { +export class RAGClient { /** @internal */ private readonly authenticatedFetch: Promise; - private readonly options: AIOptions; + private readonly options: RAGOptions; private readonly context: QueryContext; /** @internal */ constructor( public readonly client: Client, - options: AIOptions, + options: RAGOptions, context: Partial = {}, ) { - this.authenticatedFetch = GelAI.getAuthenticatedFetch(client); + this.authenticatedFetch = RAGClient.getAuthenticatedFetch(client); this.options = options; this.context = { query: context.query ?? 
"", @@ -53,8 +53,8 @@ export class GelAI { return getAuthenticatedFetch(connectConfig, httpSCRAMAuth, "ext/ai/"); } - withConfig(options: Partial) { - return new GelAI( + withConfig(options: Partial) { + return new RAGClient( this.client, { ...this.options, ...options }, this.context, @@ -62,7 +62,7 @@ export class GelAI { } withContext(context: Partial) { - return new GelAI(this.client, this.options, { + return new RAGClient(this.client, this.options, { ...this.context, ...context, }); diff --git a/packages/ai/src/types.ts b/packages/ai/src/types.ts index ce08a34e5..45f0cb1c2 100644 --- a/packages/ai/src/types.ts +++ b/packages/ai/src/types.ts @@ -1,16 +1,16 @@ export type ChatParticipantRole = "system" | "user" | "assistant" | "tool"; -export interface GelSystemMessage { +export interface SystemMessage { role: "system"; content: string; } -export interface GelUserMessage { +export interface UserMessage { role: "user"; content: { type: "text"; text: string }[]; } -export interface GelAssistantMessage { +export interface AssistantMessage { role: "assistant"; content: string; tool_calls?: { @@ -20,24 +20,24 @@ export interface GelAssistantMessage { }[]; } -export interface GelToolMessage { +export interface ToolMessage { role: "tool"; content: string; tool_call_id: string; } -export type GelMessage = - | GelSystemMessage - | GelUserMessage - | GelAssistantMessage - | GelToolMessage; +export type Message = + | SystemMessage + | UserMessage + | AssistantMessage + | ToolMessage; export type Prompt = - | { name: string; custom?: GelMessage[] } - | { id: string; custom?: GelMessage[] } - | { custom: GelMessage[] }; + | { name: string; custom?: Message[] } + | { id: string; custom?: Message[] } + | { custom: Message[] }; -export interface AIOptions { +export interface RAGOptions { model: string; prompt?: Prompt; } @@ -55,7 +55,7 @@ export interface RagRequestPrompt { } export interface RagRequestMessages { - messages: GelMessage[]; + messages: Message[]; [key: string]: 
unknown; }