Skip to content

Commit

Permalink
connect /ask command to chatgpt api via npm:ai
Browse files Browse the repository at this point in the history
  • Loading branch information
usirin committed Jun 2, 2024
1 parent 64695cf commit d4d023c
Show file tree
Hide file tree
Showing 8 changed files with 251 additions and 46 deletions.
12 changes: 5 additions & 7 deletions bots/llm-bot/bin/deploy.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,19 @@
import Debug from "debug";
import { REST, Routes, SlashCommandBuilder } from "discord.js";
import { REST, Routes } from "discord.js";
import { commands } from "../commands";
import { env } from "../env";

const debug = Debug("stage-manager:deploy");

// debug.enabled = true;
const debug = Debug("llm-bot:deploy");
debug.enabled = true;

const rest = new REST({ version: "10" }).setToken(env.DISCORD_TOKEN);

const deploy = async () => {
try {
debug(`Started refreshing ${Object.keys(commands).length} application (/) commands`);

const body = Object.entries(commands).map(([name, { description }]) =>
new SlashCommandBuilder().setName(name).setDescription(description).toJSON(),
const body = Object.entries(commands).map(([name, command]) =>
command.builder.setName(name).toJSON(),
);

const data = (await rest.put(
Expand All @@ -23,7 +22,6 @@ const deploy = async () => {
)) as unknown[];

debug(`Successfully deployed ${data.length} application (/) commands!`);
debug(data);
} catch (error) {
debug(error);
}
Expand Down
20 changes: 13 additions & 7 deletions bots/llm-bot/bin/start.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
import Debug from "debug";
import { Client, Events, GatewayIntentBits } from "discord.js";
import { type CommandCollection, commands } from "../commands";
import { commands } from "../commands";
import { TextDecoderStream, TextEncoderStream } from "../polyfill";

const debug = Debug("stage-manager:start");
globalThis.TextEncoderStream = TextEncoderStream;
globalThis.TextDecoderStream = TextDecoderStream;

const debug = Debug("llm-bot:start");
debug.enabled = true;

const client = new Client({ intents: [GatewayIntentBits.Guilds] });
Expand All @@ -14,19 +18,21 @@ client.once(Events.ClientReady, (c) => {
client.on(Events.InteractionCreate, async (interaction) => {
if (!interaction.isChatInputCommand()) return;

debug(`Received command: ${interaction.commandName} from user: ${interaction.user.username}`);
const name = interaction.commandName;

debug(`Received command: ${name} from user: ${interaction.user.username}`);

const command = (commands as CommandCollection)[interaction.commandName];
const command = commands[name];
if (!command) {
debug(`Command not found: ${interaction.commandName}`);
debug(`Command not found: ${name}`);
return;
}

try {
debug(`Executing command: ${interaction.commandName}`);
debug(`Executing command: ${name}`);
await command.handler(interaction);
} catch (error) {
debug(`Error while executing command: ${interaction.commandName}`);
debug(`Error while executing command: ${name}`);
if (interaction.deferred || interaction.replied) {
await interaction.followUp({
content: "There was an error while executing this command!",
Expand Down
73 changes: 61 additions & 12 deletions bots/llm-bot/commands.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,67 @@
import type { ChatInputCommandInteraction } from "discord.js";
import { openai } from "@ai-sdk/openai";
import { generateText, streamText } from "ai";
import { PermissionFlagsBits, SlashCommandBuilder } from "discord.js";
import type { CommandCollection } from "./types";

export type CommandCollection = Record<
string,
{
description: string;
handler: (interaction: ChatInputCommandInteraction) => Promise<void>;
}
>;

export const commands = {
export const commands: CommandCollection = {
healthcheck: {
description: "Check if the bot is healthy.",
builder: new SlashCommandBuilder().setDescription("Check if the bot is healthy."),
async handler(interaction) {
await interaction.reply("I'm healthy!");
},
},
} satisfies CommandCollection;
ask: {
	builder: new SlashCommandBuilder()
		.setDescription("Ask a question.")
		.addStringOption((option) =>
			option.setName("question").setDescription("The question to ask.").setRequired(true),
		)
		.setDefaultMemberPermissions(PermissionFlagsBits.Administrator)
		.setDMPermission(true),
	/**
	 * Streams an LLM answer back to Discord, editing the deferred reply as
	 * chunks arrive and rolling over to follow-up messages near Discord's
	 * 2000-character message limit.
	 */
	async handler(interaction) {
		// The option is declared `.setRequired(true)`, so Discord guarantees a
		// value; `getString(..., true)` narrows the type and removes the silent
		// path that previously left the deferred reply hanging forever.
		const question = interaction.options.getString("question", true);
		await interaction.deferReply();

		try {
			const answer = await streamText({
				model: openai("gpt-4o"),
				system:
					"You are a helpful assistant. Never return a response more than 1500 characters",
				maxTokens: 512,
				prompt: question,
			});

			const MESSAGE_LIMIT = 2000; // Discord's hard per-message cap
			const EDIT_THRESHOLD = 100; // batch small chunks to avoid excessive edit calls

			let content = ""; // text already rendered into the current message
			let pending = ""; // buffered chunks not yet rendered
			// Once we overflow into a follow-up, later edits must target that
			// message — interaction.editReply() always edits the original
			// deferred reply, which would clobber the first message.
			let overflow: Awaited<ReturnType<typeof interaction.followUp>> | undefined;

			const render = async () => {
				if (overflow) {
					await overflow.edit({ content });
				} else {
					await interaction.editReply({ content });
				}
			};

			const flush = async () => {
				if (content.length + pending.length < MESSAGE_LIMIT) {
					// The current chunk is part of `pending`, so no text is ever
					// dropped (the previous version lost the triggering chunk here).
					content += pending;
					await render();
				} else {
					// Current message is full: carry the whole buffer into a fresh
					// follow-up message and continue editing that one.
					content = pending;
					overflow = await interaction.followUp({ content });
				}
				pending = "";
			};

			for await (const chunk of answer.textStream) {
				pending += chunk;
				if (pending.length >= EDIT_THRESHOLD) {
					await flush();
				}
			}

			// Render whatever is still buffered after the stream ends.
			if (pending) {
				await flush();
			}
		} catch (error) {
			console.error(error);
			// Resolve the deferred reply instead of leaving it spinning forever;
			// message text mirrors the generic error path in bin/start.ts.
			await interaction.editReply({
				content: "There was an error while executing this command!",
			});
		}
	},
},
};
1 change: 1 addition & 0 deletions bots/llm-bot/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ export const env = parseEnv(process.env, {
DISCORD_TOKEN: z.string(),
DISCORD_APP_ID: z.string(),
DISCORD_SERVER_ID: z.string(),
OPENAI_API_KEY: z.string(),
});
42 changes: 22 additions & 20 deletions bots/llm-bot/package.json
Original file line number Diff line number Diff line change
@@ -1,22 +1,24 @@
{
"name": "@kampus/llm-bot",
"module": "index.ts",
"type": "module",
"scripts": {
"deploy": "bun ./bin/deploy.ts",
"start": "bun ./bin/start.ts"
},
"devDependencies": {
"@types/bun": "latest",
"@types/debug": "^4.1.12"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"debug": "^4.3.4",
"discord.js": "^14.15.2",
"znv": "^0.4.0",
"zod": "^3.23.8"
}
"name": "@kampus/llm-bot",
"module": "index.ts",
"type": "module",
"scripts": {
"deploy": "bun ./bin/deploy.ts",
"start": "bun ./bin/start.ts"
},
"devDependencies": {
"@types/bun": "latest",
"@types/debug": "^4.1.12"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"@ai-sdk/openai": "^0.0.18",
"ai": "^3.1.22",
"debug": "^4.3.4",
"discord.js": "^14.15.2",
"znv": "^0.4.0",
"zod": "^3.23.8"
}
}
124 changes: 124 additions & 0 deletions bots/llm-bot/polyfill.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
/**
 * TextEncoderStream polyfill based on Node.js' implementation https://github.com/nodejs/node/blob/3f3226c8e363a5f06c1e6a37abd59b6b8c1923f1/lib/internal/webstreams/encoding.js#L38-L119 (MIT License)
 *
 * Wraps a TextEncoder in a TransformStream<string, Uint8Array>, handling
 * surrogate pairs that are split across chunk boundaries per the WHATWG
 * Encoding spec.
 */
export class TextEncoderStream {
	// Holds a high (lead) surrogate seen at the end of a chunk so that a
	// surrogate pair split across two chunks is still encoded as one code point.
	#pendingHighSurrogate: string | null = null;

	#handle = new TextEncoder();

	#transform = new TransformStream<string, Uint8Array>({
		transform: (chunk, controller) => {
			// https://encoding.spec.whatwg.org/#encode-and-enqueue-a-chunk
			// biome-ignore lint/style/noParameterAssign: This is a polyfill
			chunk = String(chunk);

			let finalChunk = "";
			// Iterating a string yields code points, but a lone surrogate still
			// arrives as a single one-unit "character"; charCodeAt(0) classifies it.
			for (const item of chunk) {
				const codeUnit = item.charCodeAt(0);
				if (this.#pendingHighSurrogate !== null) {
					const highSurrogate = this.#pendingHighSurrogate;

					this.#pendingHighSurrogate = null;
					// A low (trail) surrogate completes the pending pair.
					if (codeUnit >= 0xdc00 && codeUnit <= 0xdfff) {
						finalChunk += highSurrogate + item;
						continue;
					}

					// The pending high surrogate turned out to be unpaired:
					// emit U+FFFD (replacement character) for it.
					finalChunk += "\uFFFD";
				}

				// High surrogate: defer output until we see the next code unit.
				if (codeUnit >= 0xd800 && codeUnit <= 0xdbff) {
					this.#pendingHighSurrogate = item;
					continue;
				}

				// Lone low surrogate (no preceding high surrogate): replace.
				if (codeUnit >= 0xdc00 && codeUnit <= 0xdfff) {
					finalChunk += "\uFFFD";
					continue;
				}

				finalChunk += item;
			}

			if (finalChunk) {
				controller.enqueue(this.#handle.encode(finalChunk));
			}
		},

		flush: (controller) => {
			// https://encoding.spec.whatwg.org/#encode-and-flush
			// An unpaired high surrogate at end-of-stream becomes U+FFFD;
			// 0xef 0xbf 0xbd is its UTF-8 encoding.
			if (this.#pendingHighSurrogate !== null) {
				controller.enqueue(new Uint8Array([0xef, 0xbf, 0xbd]));
			}
		},
	});

	get encoding() {
		return this.#handle.encoding;
	}

	get readable() {
		return this.#transform.readable;
	}

	get writable() {
		return this.#transform.writable;
	}

	get [Symbol.toStringTag]() {
		return "TextEncoderStream";
	}
}

/**
 * TextDecoderStream polyfill based on Node.js' implementation https://github.com/nodejs/node/blob/3f3226c8e363a5f06c1e6a37abd59b6b8c1923f1/lib/internal/webstreams/encoding.js#L121-L200 (MIT License)
 *
 * Thin TransformStream wrapper around TextDecoder: byte chunks in, decoded
 * string chunks out.
 */
export class TextDecoderStream {
	#handle: TextDecoder;

	#transform = new TransformStream({
		transform: (bytes, controller) => {
			// `stream: true` keeps incomplete multi-byte sequences buffered in
			// the decoder rather than emitting replacement characters mid-sequence.
			const decoded = this.#handle.decode(bytes, { stream: true });
			if (decoded.length > 0) {
				controller.enqueue(decoded);
			}
		},
		flush: (controller) => {
			// A final decode() with no argument drains anything still buffered.
			const tail = this.#handle.decode();
			if (tail.length > 0) {
				controller.enqueue(tail);
			}

			controller.terminate();
		},
	});

	constructor(encoding = "utf-8", options: TextDecoderOptions = {}) {
		this.#handle = new TextDecoder(encoding, options);
	}

	get encoding() {
		return this.#handle.encoding;
	}

	get fatal() {
		return this.#handle.fatal;
	}

	get ignoreBOM() {
		return this.#handle.ignoreBOM;
	}

	get readable() {
		return this.#transform.readable;
	}

	get writable() {
		return this.#transform.writable;
	}

	get [Symbol.toStringTag]() {
		return "TextDecoderStream";
	}
}
25 changes: 25 additions & 0 deletions bots/llm-bot/types.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import type {
ChatInputCommandInteraction,
SharedSlashCommand,
SlashCommandBuilder,
} from "discord.js";

/**
 * Defines the structure of a single slash command.
 */
export interface Command {
	/**
	 * Slash command builder declaring the command's description, options, and
	 * permissions. The command's NAME is not set here — it is applied from the
	 * CommandCollection key at deploy time (see bin/deploy.ts).
	 */
	builder: SharedSlashCommand;
	/**
	 * The function to execute when the command is called.
	 *
	 * @param interaction - The chat-input interaction that triggered the command
	 */
	handler(interaction: ChatInputCommandInteraction): Promise<void> | void;
}

/** All registered commands, keyed by the slash-command name. */
export type CommandCollection = Record<string, Command>;
Binary file modified bun.lockb
Binary file not shown.

0 comments on commit d4d023c

Please sign in to comment.