Rename GelAI to RAGClient (#1198)
diksipav authored Feb 22, 2025
1 parent 8f65045 commit e69568e
Showing 4 changed files with 67 additions and 58 deletions.
29 changes: 19 additions & 10 deletions edgedb/ai/index.ts
```diff
@@ -1,15 +1,24 @@
 import {
-  GelMessage,
-  GelSystemMessage,
-  GelUserMessage,
-  GelAssistantMessage,
-  GelToolMessage,
+  createRAGClient,
+  RAGClient,
+  Message,
+  SystemMessage,
+  UserMessage,
+  AssistantMessage,
+  ToolMessage,
+  RAGOptions,
 } from "@gel/ai";
 
 export * from "@gel/ai";
 
-export type EdgeDBSystemMessage = GelSystemMessage;
-export type EdgeDBUserMessage = GelUserMessage;
-export type EdgeDBAssistantMessage = GelAssistantMessage;
-export type EdgeDBToolMessage = GelToolMessage;
-export type EdgeDBMessage = GelMessage;
+export type EdgeDBAI = RAGClient;
+export const EdgeDBAI = RAGClient;
+
+export const createAI = createRAGClient;
+
+export type EdgeDBSystemMessage = SystemMessage;
+export type EdgeDBUserMessage = UserMessage;
+export type EdgeDBAssistantMessage = AssistantMessage;
+export type EdgeDBToolMessage = ToolMessage;
+export type EdgeDBMessage = Message;
+export type AIOptions = RAGOptions;
```
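
For context, a minimal sketch of what this compatibility wrapper buys downstream users: code written against the old EdgeDB-era names keeps working because the renamed `@gel/ai` exports are aliased back. The `"edgedb/ai"` import path below is an assumption based on the file location, not something stated in this diff.

```typescript
// Hypothetical consumer of the compatibility wrapper above (import path assumed).
import { createAI, EdgeDBAI, createRAGClient, RAGClient } from "edgedb/ai";

// After this commit both sets of names refer to the same implementation:
// `createAI` is assigned `createRAGClient`, and `EdgeDBAI` is assigned `RAGClient`.
console.log(createAI === createRAGClient); // true
console.log(EdgeDBAI === RAGClient); // true
```
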
48 changes: 24 additions & 24 deletions packages/ai/README.md
````diff
@@ -16,26 +16,26 @@ See the AI documentation for detailed guidance on setting up the AI extension an
 
 ## API Reference
 
-### `createAI(client: Client, options: Partial<AIOptions> = {}): GelAI`
+### `createRAGClient(client: Client, options: Partial<AIOptions> = {}): RAGClient`
 
-Creates an instance of `GelAI` with the specified client and options.
+Creates an instance of `RAGClient` with the specified client and options.
 
-- `client`: An Gel client instance.
+- `client`: A Gel client instance.
 - `options`: Configuration options for the AI model.
   - `model`: Required. Specifies the AI model to use. This could be some of the OpenAI, Mistral or Anthropic models supported by Gel AI.
   - `prompt`: Optional. Defines the input messages for the AI model. The prompt can have an `ID` or a `name` referencing a stored prompt. The referenced prompt will supply predefined messages. Optionally, include a custom list of messages using the `custom` field. These custom messages will be concatenated with messages from the stored prompt referenced by `id` or `name`. If no `id` or `name` is specified, only the `custom` messages will be used. If no `id`, `name`, or `custom` messages are provided, the built-in system prompt will be used by default.
 
-### `GelAI`
+### `RAGClient`
 
 #### Public Methods
 
-- `withConfig(options: Partial<AIOptions>): GelAI`
+- `withConfig(options: Partial<AIOptions>): RAGClient`
 
-  Returns a new `GelAI` instance with updated configuration options.
+  Returns a new `RAGClient` instance with updated configuration options.
 
-- `withContext(context: Partial<QueryContext>): GelAI`
+- `withContext(context: Partial<QueryContext>): RAGClient`
 
-  Returns a new `GelAI` instance with an updated query context.
+  Returns a new `RAGClient` instance with an updated query context.
 
 - `async queryRag(message: string, context?: QueryContext): Promise<string>`
 
@@ -62,54 +62,54 @@ The following example demonstrates how to use the `@gel/ai` package to query an
 
 ```typescript
 import { createClient } from "gel";
-import { createAI } from "./src/index.js";
+import { createRAGClient } from "@gel/ai";
 
-const client = createClient({
+const client = createRAGClient({
   instanceName: "_localdev",
   database: "main",
   tlsSecurity: "insecure",
 });
 
-const gpt4Ai = createAI(client, {
-  model: "gpt-4-turbo-preview",
+const gpt4Rag = createRAGClient(client, {
+  model: "gpt-4-turbo",
 });
 
-const astronomyAi = gpt4Ai.withContext({ query: "Astronomy" });
+const astronomyRag = gpt4Rag.withContext({ query: "Astronomy" });
 
 console.time("gpt-4 Time");
-console.log(await astronomyAi.queryRag("What color is the sky on Mars?"));
+console.log(await astronomyRag.queryRag("What color is the sky on Mars?"));
 console.timeEnd("gpt-4 Time");
 
-const fastAstronomyAi = astronomyAi.withConfig({
-  model: "gpt-3.5-turbo",
+const fastAstronomyRag = astronomyRag.withConfig({
+  model: "gpt-4o",
 });
 
-console.time("gpt-3.5 Time");
-console.log(await fastAstronomyAi.queryRag("What color is the sky on Mars?"));
-console.timeEnd("gpt-3.5 Time");
+console.time("gpt-4o Time");
+console.log(await fastAstronomyRag.queryRag("What color is the sky on Mars?"));
+console.timeEnd("gpt-4o Time");
 
-const fastChemistryAi = fastAstronomyAi.withContext({ query: "Chemistry" });
+const fastChemistryRag = fastAstronomyRag.withContext({ query: "Chemistry" });
 
 console.log(
-  await fastChemistryAi.queryRag("What is the atomic number of gold?"),
+  await fastChemistryRag.queryRag("What is the atomic number of gold?"),
 );
 
 // handle the Response object
-const response = await fastChemistryAi.streamRag(
+const response = await fastChemistryRag.streamRag(
   "What is the atomic number of gold?",
 );
 handleReadableStream(response); // custom function that reads the stream
 
 // handle individual chunks as they arrive
-for await (const chunk of fastChemistryAi.streamRag(
+for await (const chunk of fastChemistryRag.streamRag(
   "What is the atomic number of gold?",
 )) {
   console.log("chunk", chunk);
 }
 
 // embeddings
 console.log(
-  await fastChemistryAi.generateEmbeddings(
+  await fastChemistryRag.generateEmbeddings(
     ["What is the atomic number of gold?"],
     "text-embedding-ada-002",
   ),
````
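
The `prompt` option described in the API Reference above is not exercised by the README example. Below is a hedged sketch of how a stored prompt could be combined with custom messages; the stored prompt name `builtin::rag-default` and the connection setup are assumptions, not part of this commit.

```typescript
import { createClient } from "gel";
import { createRAGClient } from "@gel/ai";

// Connection details are resolved from the local project/environment (assumption).
const client = createClient();

const rag = createRAGClient(client, {
  model: "gpt-4-turbo",
  prompt: {
    // Reference a stored prompt by name; "builtin::rag-default" is assumed to exist.
    name: "builtin::rag-default",
    // Custom messages are concatenated with the stored prompt's messages.
    custom: [
      {
        role: "user",
        content: [{ type: "text", text: "Answer in one short sentence." }],
      },
    ],
  },
}).withContext({ query: "Astronomy" });

console.log(await rag.queryRag("What color is the sky on Mars?"));
```
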
20 changes: 10 additions & 10 deletions packages/ai/src/core.ts
```diff
@@ -7,7 +7,7 @@ import {
   type AuthenticatedFetch,
 } from "gel/dist/utils.js";
 import {
-  type AIOptions,
+  type RAGOptions,
   type QueryContext,
   type StreamingMessage,
   type RagRequest,
@@ -18,25 +18,25 @@ import { getHTTPSCRAMAuth } from "gel/dist/httpScram.js";
 import { cryptoUtils } from "gel/dist/browserCrypto.js";
 import { extractMessageFromParsedEvent, handleResponseError } from "./utils.js";
 
-export function createAI(client: Client, options: AIOptions) {
-  return new GelAI(client, options);
+export function createRAGClient(client: Client, options: RAGOptions) {
+  return new RAGClient(client, options);
 }
 
 const httpSCRAMAuth = getHTTPSCRAMAuth(cryptoUtils);
 
-export class GelAI {
+export class RAGClient {
   /** @internal */
   private readonly authenticatedFetch: Promise<AuthenticatedFetch>;
-  private readonly options: AIOptions;
+  private readonly options: RAGOptions;
   private readonly context: QueryContext;
 
   /** @internal */
   constructor(
     public readonly client: Client,
-    options: AIOptions,
+    options: RAGOptions,
     context: Partial<QueryContext> = {},
   ) {
-    this.authenticatedFetch = GelAI.getAuthenticatedFetch(client);
+    this.authenticatedFetch = RAGClient.getAuthenticatedFetch(client);
     this.options = options;
     this.context = {
       query: context.query ?? "",
@@ -53,16 +53,16 @@ export class GelAI {
     return getAuthenticatedFetch(connectConfig, httpSCRAMAuth, "ext/ai/");
   }
 
-  withConfig(options: Partial<AIOptions>) {
-    return new GelAI(
+  withConfig(options: Partial<RAGOptions>) {
+    return new RAGClient(
       this.client,
       { ...this.options, ...options },
       this.context,
     );
   }
 
   withContext(context: Partial<QueryContext>) {
-    return new GelAI(this.client, this.options, {
+    return new RAGClient(this.client, this.options, {
       ...this.context,
       ...context,
     });
```
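
Note how `withConfig` and `withContext` in the class above never mutate the receiver; each call constructs a fresh `RAGClient` with merged options or context. A small sketch of the resulting usage pattern (client setup assumed, not taken from this diff):

```typescript
import { createClient } from "gel";
import { createRAGClient } from "@gel/ai";

const client = createClient(); // connection details assumed to come from the project

const base = createRAGClient(client, { model: "gpt-4-turbo" });
const astronomy = base.withContext({ query: "Astronomy" }); // new instance, same options
const fastAstronomy = astronomy.withConfig({ model: "gpt-4o" }); // new instance, merged options

// `base` is untouched: it still targets gpt-4-turbo with an empty query context,
// while `fastAstronomy` carries both the overridden model and the Astronomy context.
```
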
28 changes: 14 additions & 14 deletions packages/ai/src/types.ts
```diff
@@ -1,16 +1,16 @@
 export type ChatParticipantRole = "system" | "user" | "assistant" | "tool";
 
-export interface GelSystemMessage {
+export interface SystemMessage {
   role: "system";
   content: string;
 }
 
-export interface GelUserMessage {
+export interface UserMessage {
   role: "user";
   content: { type: "text"; text: string }[];
 }
 
-export interface GelAssistantMessage {
+export interface AssistantMessage {
   role: "assistant";
   content: string;
   tool_calls?: {
@@ -20,24 +20,24 @@ }[];
   }[];
 }
 
-export interface GelToolMessage {
+export interface ToolMessage {
   role: "tool";
   content: string;
   tool_call_id: string;
 }
 
-export type GelMessage =
-  | GelSystemMessage
-  | GelUserMessage
-  | GelAssistantMessage
-  | GelToolMessage;
+export type Message =
+  | SystemMessage
+  | UserMessage
+  | AssistantMessage
+  | ToolMessage;
 
 export type Prompt =
-  | { name: string; custom?: GelMessage[] }
-  | { id: string; custom?: GelMessage[] }
-  | { custom: GelMessage[] };
+  | { name: string; custom?: Message[] }
+  | { id: string; custom?: Message[] }
+  | { custom: Message[] };
 
-export interface AIOptions {
+export interface RAGOptions {
   model: string;
   prompt?: Prompt;
 }
@@ -55,7 +55,7 @@ export interface RagRequestPrompt {
 }
 
 export interface RagRequestMessages {
-  messages: GelMessage[];
+  messages: Message[];
   [key: string]: unknown;
 }
```
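
For reference, a hedged sketch of constructing values against the renamed types. It assumes `Message`, `Prompt`, and `RAGOptions` are re-exported from the `@gel/ai` entry point, as the wrapper in `edgedb/ai/index.ts` suggests for most of them.

```typescript
import type { Message, Prompt, RAGOptions } from "@gel/ai";

// `Message` is the discriminated union of SystemMessage | UserMessage |
// AssistantMessage | ToolMessage defined above.
const history: Message[] = [
  { role: "system", content: "You are a concise assistant." },
  {
    role: "user",
    content: [{ type: "text", text: "What is the atomic number of gold?" }],
  },
];

// A prompt made purely of custom messages (no stored prompt referenced).
const prompt: Prompt = { custom: history };

export const options: RAGOptions = { model: "gpt-4-turbo", prompt };
```
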
