From 83419ae846778d6911776b4036f07d2b3c57fbc0 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 19:01:17 -0700 Subject: [PATCH 01/15] remove npmrc file --- .gitignore | 36 +++++++++++++++++++++++++++++++++--- .npmrc | 1 - 2 files changed, 33 insertions(+), 4 deletions(-) delete mode 100644 .npmrc diff --git a/.gitignore b/.gitignore index 1b0a85b5..891d569d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,38 @@ dist/ -node_modules/ -.DS_Store .env .changeset/README.md .changeset/*.tmp.md oclif.manifest.json -.pnpm-store/ \ No newline at end of file +.pnpm-store/ +.npmrc + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +# Dependency directory +**/node_modules/** +_node_modules +.pnp.cjs +*.local + +# Coverage directory used by tools like istanbul +coverage + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? +.tmp +_docpress \ No newline at end of file diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 62c30db5..00000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -//registry.npmjs.org/:_authToken=${process.env.NPM_TOKEN} From fd8aebbea60fd648701fa2f502a7c6b1f3a448b5 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 20:10:53 -0700 Subject: [PATCH 02/15] adds var to hold all model names --- packages/llm/src/model-manager.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/llm/src/model-manager.ts b/packages/llm/src/model-manager.ts index 11adaf19..0519ca91 100644 --- a/packages/llm/src/model-manager.ts +++ b/packages/llm/src/model-manager.ts @@ -32,6 +32,7 @@ const modelConfig = { }; export type AllModels = (typeof modelConfig)[keyof typeof modelConfig][number]; +export const allModels = Object.values(modelConfig).flat(); export function isAllModel(model: string): model is AllModels { return Object.values(modelConfig).some((models) => // @ts-expect-error this could be anything From 22988527135a9c8007b4c3eab4edf699629e8c20 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 20:11:44 -0700 Subject: [PATCH 03/15] adds util pkg --- package.json | 3 +- packages/utils/package.json | 31 ++++++ packages/utils/src/index.ts | 1 + .../src/process/convert-dir-to-text-file.ts | 100 ++++++++++++++++++ packages/utils/src/process/index.ts | 3 + packages/utils/tsconfig.cjs.json | 8 ++ packages/utils/tsconfig.json | 21 ++++ pnpm-lock.yaml | 13 +++ 8 files changed, 179 insertions(+), 1 deletion(-) create mode 100644 packages/utils/package.json create mode 100644 packages/utils/src/index.ts create mode 100644 packages/utils/src/process/convert-dir-to-text-file.ts create mode 100644 packages/utils/src/process/index.ts create mode 100644 packages/utils/tsconfig.cjs.json create mode 100644 packages/utils/tsconfig.json diff --git a/package.json b/package.json index d0b9e6b9..9ec00382 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,8 @@ "devDependencies": { "@changesets/cli": "^2.27.7", "ts-node": "^10.9.2", - "typescript": "^5.5.4" + "typescript": "^5.5.4", + "@types/node": "^18" }, "dependencies": { "@langchain/core": "^0.2.18", diff --git a/packages/utils/package.json b/packages/utils/package.json new file mode 100644 index 00000000..4c7504dc --- /dev/null +++ b/packages/utils/package.json @@ -0,0 +1,31 @@ +{ + "name": "@ai-citizens/utils", + "version": "0.0.0", + "description": "a collection of utility functions for AI powered applications", + "type": "module", + "main": "./dist/cjs/index.js", + "module": 
"./dist/esm/index.js", + "types": "./dist/esm/index.d.ts", + "exports": { + ".": { + "import": "./dist/esm/index.js", + "require": "./dist/cjs/index.js" + }, + "./process": { + "import": "./dist/esm/process/index.js", + "require": "./dist/cjs/process/index.js" + } + }, + "scripts": { + "build": "tsc -p tsconfig.json && tsc -p tsconfig.cjs.json", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "peerDependencies": { + "typescript": "workspace:*", + "@types/node": "workspace:*" + }, + "devDependencies": {} +} diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts new file mode 100644 index 00000000..3dd86872 --- /dev/null +++ b/packages/utils/src/index.ts @@ -0,0 +1 @@ +export * from "./process/index.js"; diff --git a/packages/utils/src/process/convert-dir-to-text-file.ts b/packages/utils/src/process/convert-dir-to-text-file.ts new file mode 100644 index 00000000..b1f8d699 --- /dev/null +++ b/packages/utils/src/process/convert-dir-to-text-file.ts @@ -0,0 +1,100 @@ +import fs from "node:fs/promises"; +import path from "node:path"; + +interface ConvertOptions { + /** + * An array of paths to ignore. + */ + ignore?: string[]; + /** + * If provided, the output will be saved to a file named `converted-dir-output.txt` in the specified directory. + */ + outputPath?: string; +} + +async function convertDirToTextFile( + dirPath: string, + options: ConvertOptions +): Promise { + const { ignore = [], outputPath } = options; + let output = ""; + + // Function to generate directory structure + async function generateDirStructure( + currentPath: string, + prefix = "" + ): Promise { + const entries = await fs.readdir(currentPath, { withFileTypes: true }); + let structure = ""; + + const entryPromises = entries.map(async (entry, index) => { + const relativePath = path.relative( + dirPath, + path.join(currentPath, entry.name) + ); + if (ignore.some((ignorePath) => relativePath.startsWith(ignorePath))) { + return ""; + } + + const isLast = index === entries.length - 1; + const newPrefix = isLast ? `${prefix} ` : `${prefix}│ `; + const entryPrefix = isLast ? 
diff --git a/packages/utils/src/process/index.ts b/packages/utils/src/process/index.ts
new file mode 100644
index 00000000..4a4153a2
--- /dev/null
+++ b/packages/utils/src/process/index.ts
@@ -0,0 +1,3 @@
+import convertDirToTextFile from "./convert-dir-to-text-file.js";
+
+export { convertDirToTextFile };
diff --git a/packages/utils/tsconfig.cjs.json b/packages/utils/tsconfig.cjs.json
new file mode 100644
index 00000000..56a21284
--- /dev/null
+++ b/packages/utils/tsconfig.cjs.json
@@ -0,0 +1,8 @@
+{
+  "extends": "./tsconfig.json",
+  "compilerOptions": {
+    "module": "CommonJS",
+    "outDir": "./dist/cjs",
+    "moduleResolution": "Node"
+  }
+}
diff --git a/packages/utils/tsconfig.json b/packages/utils/tsconfig.json
new file mode 100644
index 00000000..73a12443
--- /dev/null
+++ b/packages/utils/tsconfig.json
@@ -0,0 +1,21 @@
+{
+  "compilerOptions": {
+    "incremental": true,
+    "target": "ESNext",
+    "module": "ESNext",
+    "declaration": true,
+    "esModuleInterop": true,
+    "outDir": "./dist/esm",
+    "skipLibCheck": true,
+    "forceConsistentCasingInFileNames": true,
+    "moduleResolution": "Bundler",
+    "resolveJsonModule": true,
+    "allowJs": true,
+    "baseUrl": ".",
+    "paths": {
+      "@/*": ["src/*"]
+    }
+  },
+  "include": ["src/**/*"],
+  "exclude": ["node_modules", "**/*.spec.ts"]
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 03bbd7f4..b5f85832 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -30,6 +30,9 @@ importers:
       '@ai-citizens/llm':
         specifier: workspace:*
         version: link:../llm
+      '@ai-citizens/utils':
+        specifier: workspace:*
+        version: link:../utils
       '@langchain/anthropic':
         specifier: ^0.2.10
         version: 0.2.10(openai@4.53.2)
@@ -148,6 +151,16 @@ importers:
         specifier: ^5.3.4
         version: 5.3.5(@types/node@22.0.0)
 
+  packages/utils:
+    dependencies:
+      typescript:
+        specifier: ^5.5.4
+        version: 5.5.4
+    devDependencies:
+      '@types/node':
+        specifier: ^18
+        version: 18.19.42
+
 packages:
 
   '@anthropic-ai/sdk@0.22.0':
From 
fcb5b7cb38dbbd57dd1fc319dd7e597beb7ccb75 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 20:12:57 -0700 Subject: [PATCH 04/15] adds check for env vars --- .gitignore | 3 +- packages/llm/src/model-manager.ts | 61 +++++++++++++++++++++++-------- 2 files changed, 48 insertions(+), 16 deletions(-) diff --git a/.gitignore b/.gitignore index 891d569d..de515019 100644 --- a/.gitignore +++ b/.gitignore @@ -35,4 +35,5 @@ coverage *.sln *.sw? .tmp -_docpress \ No newline at end of file +_docpress +tsconfig.tsbuildinfo diff --git a/packages/llm/src/model-manager.ts b/packages/llm/src/model-manager.ts index 0519ca91..dfc707de 100644 --- a/packages/llm/src/model-manager.ts +++ b/packages/llm/src/model-manager.ts @@ -63,14 +63,17 @@ export const openAiModel = ({ maxTokens?: number; model?: OpenAIModel; temperature?: number; -}) => - new ChatOpenAI({ +}) => { + if (!process.env.OPENAI_API_KEY) { + throw new Error("OPENAI_API_KEY is not set"); + } + return new ChatOpenAI({ configuration: { baseURL: baseUrl }, maxTokens, model, temperature, }); - +}; export const groqModel = ({ maxTokens = 1024, model = "llama-3.1-8b-instant", @@ -79,13 +82,16 @@ export const groqModel = ({ maxTokens?: number; model?: GroqModel; temperature?: number; -}) => - new ChatGroq({ +}) => { + if (!process.env.GROQ_API_KEY) { + throw new Error("GROQ_API_KEY is not set"); + } + return new ChatGroq({ maxTokens, model, temperature, }); - +}; export const anthropicModel = ({ maxTokens = 1024, model = "claude-3-haiku-20240307", @@ -94,12 +100,16 @@ export const anthropicModel = ({ maxTokens?: number; model?: AnthropicModel; temperature?: number; -}) => - new ChatAnthropic({ +}) => { + if (!process.env.ANTHROPIC_API_KEY) { + throw new Error("ANTHROPIC_API_KEY is not set"); + } + return new ChatAnthropic({ maxTokens, model, temperature, }); +}; export const googleModel = ({ maxTokens = 1024, @@ -109,14 +119,18 @@ export const googleModel = ({ maxTokens?: number; model?: GoogleModel; temperature?: number; -}) => - new ChatGoogleGenerativeAI({ +}) => { + if (!process.env.GOOGLE_API_KEY) { + throw new Error("GOOGLE_API_KEY is not set"); + } + return new ChatGoogleGenerativeAI({ maxOutputTokens: maxTokens, model, temperature, }); +}; // Any OpenAI compatible endpoint should work here, tested with llama.cpp server -export const localModel = ({ +export async function localModel({ baseURL = "http://192.168.4.195:8080/v1", maxTokens = 1024, model = "hermes-2-pro-llama-3-8b", @@ -126,15 +140,21 @@ export const localModel = ({ maxTokens?: number; model?: string; temperature?: number; -}) => - new ChatOpenAI({ +}) { + // check if the url returns anything + const response = await fetch(baseURL + "/models"); + if (!response.ok) { + throw new Error(`Failed to connect to ${baseURL}`); + } + return new ChatOpenAI({ configuration: { baseURL }, maxTokens, model, temperature, }); +} -export const ollamaModel = ({ +export const ollamaModel = async ({ baseUrl = "http://127.0.0.1:11434", model = "llama3.1", temperature = 0.1, @@ -142,7 +162,18 @@ export const ollamaModel = ({ baseUrl?: string; model?: OllamaModel; temperature?: number; -}) => new ChatOllama({ baseUrl, checkOrPullModel: false, model, temperature }); +}) => { + const response = await fetch(baseUrl + "/models"); + if (!response.ok) { + throw new Error(`Failed to connect to ${baseUrl}`); + } + return new ChatOllama({ + baseUrl, + checkOrPullModel: false, + model, + temperature, + }); +}; export const getModel = ({ baseUrl, From 69244194c62066ce4d9039a79c68a6ffa848f778 Mon Sep 17 
00:00:00 2001
From: Josh Mabry
Date: Mon, 29 Jul 2024 20:13:34 -0700
Subject: [PATCH 05/15] adds convert dir command

---
 packages/cli/package.json                     |  1 +
 packages/cli/src/commands/chat.ts             |  4 +-
 packages/cli/src/commands/cla.ts              | 17 +---
 packages/cli/src/commands/util/process/dir.ts | 94 +++++++++++++++++++
 packages/cli/tsconfig.json                    | 14 ++-
 packages/llm/package.json                     |  2 +-
 packages/llm/tsconfig.cjs.json                |  3 +-
 packages/ui/.gitignore                        |  7 --
 8 files changed, 116 insertions(+), 26 deletions(-)
 create mode 100644 packages/cli/src/commands/util/process/dir.ts

diff --git a/packages/cli/package.json b/packages/cli/package.json
index 0a75b32e..dc1285c2 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -32,6 +32,7 @@
   },
   "dependencies": {
     "@ai-citizens/llm": "workspace:*",
+    "@ai-citizens/utils": "workspace:*",
    "@langchain/anthropic": "^0.2.10",
    "@langchain/google-genai": "^0.0.23",
    "@langchain/groq": "^0.0.15",
diff --git a/packages/cli/src/commands/chat.ts b/packages/cli/src/commands/chat.ts
index 9c2351eb..3f4f488e 100644
--- a/packages/cli/src/commands/chat.ts
+++ b/packages/cli/src/commands/chat.ts
@@ -29,7 +29,7 @@ export default class Chat extends Command {
   static override description = "Interactive chat with the AI assistant";
 
   public async run(): Promise<void> {
-    const { args, flags } = await this.parse(Chat);
+    const { flags } = await this.parse(Chat);
 
     const modelName = flags.model || "gpt-4o-mini";
 
@@ -37,7 +37,7 @@ export default class Chat extends Command {
       throw new Error(`Invalid model: ${modelName}`);
     }
 
-    const model = getModel({ model: modelName });
+    const model = await getModel({ model: modelName });
     const parser = new StringOutputParser();
 
     const chain = prompt.pipe(model);
diff --git a/packages/cli/src/commands/cla.ts b/packages/cli/src/commands/cla.ts
index e9f2413b..c28e5683 100644
--- a/packages/cli/src/commands/cla.ts
+++ b/packages/cli/src/commands/cla.ts
@@ -3,15 +3,12 @@ import { AIMessage, HumanMessage } from "@langchain/core/messages";
 import { StringOutputParser } from "@langchain/core/output_parsers";
 import { ChatPromptTemplate } from "@langchain/core/prompts";
 import { RunnableWithMessageHistory } from "@langchain/core/runnables";
-import { Command, Flags } from "@oclif/core";
 import clipboardy from "clipboardy";
 import inquirer from "inquirer";
 import { exec } from "node:child_process";
 import { getModel, isAllModel } from "@ai-citizens/llm";
-import { config } from "dotenv";
-config({
-  path: ["~/ava.env"],
-});
+import Chat from "./chat.js";
+
 const messageHistories: Record<string, InMemoryChatMessageHistory> = {};
 
 const MAX_OUTPUT_LINES = 100; // Adjust this value as needed
@@ -26,13 +23,7 @@ const prompt = ChatPromptTemplate.fromMessages([
   ["human", "{input}"],
 ]);
 
-export default class CLA extends Command {
-  static override flags = {
-    model: Flags.string({
-      description: "The model to use",
-      required: false,
-    }),
-  };
+export default class CLA extends Chat {
   static override description =
     "Interactive AI agent to generate and execute commands based on natural language input";
@@ -45,7 +36,7 @@ export default class CLA extends Command {
     if (!isAllModel(modelName)) {
       throw new Error(`Invalid model: ${modelName}`);
     }
-    const model = getModel({
+    const model = await getModel({
       model: modelName,
     });
     const parser = new StringOutputParser();
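One detail worth calling out in the hunks above: `getModel` is now awaited, since some of the provider factories from PATCH 04 became async. A minimal sketch of the resulting calling pattern (assumes `OPENAI_API_KEY` is set for the default model; `invoke` is the standard LangChain chat-model entry point):

```ts
import { getModel, isAllModel } from "@ai-citizens/llm";

// Resolve a chat model by name before wiring it into a chain or prompt.
const modelName = "gpt-4o-mini";
if (!isAllModel(modelName)) {
  throw new Error(`Invalid model: ${modelName}`);
}
const model = await getModel({ model: modelName });
const reply = await model.invoke("Say hello in one word.");
console.log(reply.content);
```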
diff --git a/packages/cli/src/commands/util/process/dir.ts b/packages/cli/src/commands/util/process/dir.ts
new file mode 100644
index 00000000..84106215
--- /dev/null
+++ b/packages/cli/src/commands/util/process/dir.ts
@@ -0,0 +1,94 @@
+import { Args, Command, Flags } from "@oclif/core";
+import "dotenv/config";
+import fs from "node:fs";
+
+import { convertDirToTextFile } from "@ai-citizens/utils/process";
+
+const defaultIgnore = [
+  ".DS_Store",
+  "package-lock.json",
+  ".angular",
+  "yarn.lock",
+  "pnpm-lock.yaml",
+  "node_modules",
+  "dist",
+];
+
+export default class Dir extends Command {
+  static override args = {
+    inputDir: Args.string({
+      description: "input directory to convert to text file",
+    }),
+  };
+
+  static override description = "Converts a directory of files to a text file";
+
+  static override examples = ["<%= config.bin %> <%= command.id %>"];
+
+  static override flags = {
+    gitIgnore: Flags.string({
+      char: "g",
+      description: "use .gitignore file to ignore files and directories",
+    }),
+    ignore: Flags.string({
+      char: "i",
+      description: "ignore files and directories using comma separated string",
+    }),
+    outputFile: Flags.string({
+      char: "o",
+      description: "output file to write to",
+    }),
+  };
+
+  public async run(): Promise<void> {
+    const { args, flags } = await this.parse(Dir);
+
+    if (!args.inputDir) {
+      this.error("Input directory is required");
+    }
+
+    if (!flags.outputFile) {
+      this.error("Output file is required");
+    }
+
+    const ignore = [...defaultIgnore, ...(flags.ignore?.split(",") || [])];
+
+    if (flags.gitIgnore) {
+      ignore.push(...(await getGitIgnore(flags.gitIgnore)));
+    }
+
+    await convertDirToTextFile(args.inputDir, {
+      ignore,
+      outputPath: flags.outputFile,
+    });
+  }
+}
+
+const getGitIgnore = async (gitIgnorePath: string): Promise<string[]> => {
+  try {
+    if (!fs.existsSync(gitIgnorePath)) {
+      console.warn(
+        `Warning: .gitignore file not found at ${gitIgnorePath}. Ignoring .gitignore patterns.`
+      );
+      return [];
+    }
+
+    const stats = fs.statSync(gitIgnorePath);
+    if (stats.isDirectory()) {
+      console.warn(
+        `Warning: ${gitIgnorePath} is a directory, not a file. 
Ignoring .gitignore patterns.` + ); + return []; + } + + const gitIgnore = fs.readFileSync(gitIgnorePath, "utf8"); + return gitIgnore + .split("\n") + .filter((line) => line.trim() !== "" && !line.startsWith("#")); + } catch (error) { + console.error( + `Error reading .gitignore file at ${gitIgnorePath}: ${error}` + ); + return []; + } +}; diff --git a/packages/cli/tsconfig.json b/packages/cli/tsconfig.json index 27754a18..1eb064ff 100644 --- a/packages/cli/tsconfig.json +++ b/packages/cli/tsconfig.json @@ -1,12 +1,22 @@ { "compilerOptions": { + "baseUrl": ".", + "paths": { + "@ai-citizen/*": ["../*"], + "@ai-citizen/utils": ["../utils"] + }, "declaration": true, "module": "Node16", "outDir": "dist", - "rootDir": "src", + "rootDir": "./src", + "composite": true, "strict": true, "target": "es2022", - "moduleResolution": "node16" + "moduleResolution": "node16", + "typeRoots": ["node_modules/@types", "node_modules/@ai-citizen/types"] + }, + "ts-node": { + "esm": true }, "include": ["./src/**/*"] } diff --git a/packages/llm/package.json b/packages/llm/package.json index df345763..b7dd245e 100644 --- a/packages/llm/package.json +++ b/packages/llm/package.json @@ -13,7 +13,7 @@ } }, "scripts": { - "build": "tsc", + "build": "tsc -p tsconfig.json && tsc -p tsconfig.cjs.json", "test": "echo \"Error: no test specified\" && exit 1" }, "keywords": [], diff --git a/packages/llm/tsconfig.cjs.json b/packages/llm/tsconfig.cjs.json index 8ab4792d..56a21284 100644 --- a/packages/llm/tsconfig.cjs.json +++ b/packages/llm/tsconfig.cjs.json @@ -2,6 +2,7 @@ "extends": "./tsconfig.json", "compilerOptions": { "module": "CommonJS", - "outDir": "./dist/cjs" + "outDir": "./dist/cjs", + "moduleResolution": "Node" } } diff --git a/packages/ui/.gitignore b/packages/ui/.gitignore index a547bf36..cf512c7f 100644 --- a/packages/ui/.gitignore +++ b/packages/ui/.gitignore @@ -1,11 +1,4 @@ # Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* node_modules dist From 50df5d820d16dcb76d1c289d3314b4ba9dc6f967 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 20:14:43 -0700 Subject: [PATCH 06/15] update readme --- packages/cli/README.md | 52 ++++++++---------------------------------- 1 file changed, 10 insertions(+), 42 deletions(-) diff --git a/packages/cli/README.md b/packages/cli/README.md index bbd04bdd..b052a420 100644 --- a/packages/cli/README.md +++ b/packages/cli/README.md @@ -93,61 +93,29 @@ hello world! 
(./src/commands/hello/world.ts)
 ```
 
 # 🔨 Commands
 
-* [`ava chat`](#ava-chat)
-* [`ava cla`](#ava-cla)
-* [`ava init [CONFIGPATH]`](#ava-init-configpath)
+* [`ava util process dir [INPUTDIR]`](#ava-util-process-dir-inputdir)
 
-## `ava chat`
+## `ava util process dir [INPUTDIR]`
 
-Interactive chat with the AI assistant
+Converts a directory of files to a text file
 
 ```
 USAGE
-  $ ava chat [--model <value>]
-
-FLAGS
-  --model=<value>  The model to use
-
-DESCRIPTION
-  Interactive chat with the AI assistant
-```
-
-## `ava cla`
-
-Interactive AI agent to generate and execute commands based on natural language input
-
-```
-USAGE
-  $ ava cla [--model <value>]
-
-FLAGS
-  --model=<value>  The model to use
-
-DESCRIPTION
-  Interactive AI agent to generate and execute commands based on natural language input
-```
-
-## `ava init [CONFIGPATH]`
-
-Initialize Ava configuration
-
-```
-USAGE
-  $ ava init [CONFIGPATH] [-f]
+  $ ava util process dir [INPUTDIR] [-g <value>] [-i <value>] [-o <value>]
 
 ARGUMENTS
-  CONFIGPATH  Optional path for the config file
+  INPUTDIR  input directory to convert to text file
 
 FLAGS
-  -f, --force  Overwrite existing config file
+  -g, --gitIgnore=<value>   use .gitignore file to ignore files and directories
+  -i, --ignore=<value>      ignore files and directories using comma separated string
+  -o, --outputFile=<value>  output file to write to
 
 DESCRIPTION
-  Initialize Ava configuration
+  Converts a directory of files to a text file
 
 EXAMPLES
-  $ ava init
-
-  $ ava init /custom/path/ava.env
+  $ ava util process dir
 ```
From 383966e4491ead3825dbd91e8a8c478dfa791f09 Mon Sep 17 00:00:00 2001
From: Josh Mabry
Date: Mon, 29 Jul 2024 21:31:57 -0700
Subject: [PATCH 07/15] update readme, cleanup

---
 packages/cli/README.md                        | 31 ++++++-----------
 packages/cli/src/commands/cla.ts              |  2 +-
 packages/cli/src/commands/init.ts             |  2 +-
 packages/cli/src/commands/util/process/dir.ts |  3 +-
 packages/llm/package.json                     |  2 +-
 packages/llm/src/model-manager.ts             |  4 +--
 6 files changed, 20 insertions(+), 24 deletions(-)

diff --git a/packages/cli/README.md b/packages/cli/README.md
index b052a420..2442ae18 100644
--- a/packages/cli/README.md
+++ b/packages/cli/README.md
@@ -1,30 +1,23 @@
-
-
 # `oclif` CLI
 
-[![Version](https://img.shields.io/npm/v/oclif.svg)](https://npmjs.org/package/oclif)
+
 
-* [`oclif` CLI](#oclif-cli)
-* [🗒 Description](#-description)
-* [🚀 Getting Started Tutorial](#-getting-started-tutorial)
-* [📌 Requirements](#-requirements)
-* [📌 Migrating from V1](#-migrating-from-v1)
-* [🏗 Usage](#-usage)
-* [📚 Examples](#-examples)
-* [🔨 Commands](#-commands)
-* [🚀 Contributing](#-contributing)
-* [🏭 Related Repositories](#-related-repositories)
-* [🦔 Learn More](#-learn-more)
+
+- [🗒 Description](#-description)
+- [🔨 Commands](#-commands)
+- [🚀 Contributing](#-contributing)
+- [🏭 Related Repositories](#-related-repositories)
+- [🦔 Learn More](#-learn-more)
 
 # 🗒 Description
 
 This is the `oclif` CLI for the [Open CLI Framework](https://github.com/oclif/core), that supports the development of oclif plugins and CLIs.
 
-[See the docs for more information](http://oclif.io). 
+
 
 # 🔨 Commands
 
-* [`ava util process dir [INPUTDIR]`](#ava-util-process-dir-inputdir)
+
+- [`ava util process dir [INPUTDIR]`](#ava-util-process-dir-inputdir)
 
 ## `ava util process dir [INPUTDIR]`
 
@@ -117,6 +111,7 @@ DESCRIPTION
 EXAMPLES
   $ ava util process dir
 ```
+
 # 🚀 Contributing
 
diff --git a/packages/cli/src/commands/cla.ts b/packages/cli/src/commands/cla.ts
index c28e5683..c41fcdcd 100644
--- a/packages/cli/src/commands/cla.ts
+++ b/packages/cli/src/commands/cla.ts
@@ -10,7 +10,7 @@ import { getModel, isAllModel } from "@ai-citizens/llm";
 import Chat from "./chat.js";
 
 const messageHistories: Record<string, InMemoryChatMessageHistory> = {};
-const MAX_OUTPUT_LINES = 100; // Adjust this value as needed
+const MAX_OUTPUT_LINES = 100;
 
 const systemPrompt = `You are an AI assistant that generates shell commands based on user input.
 Generate only the command itself, without any explanations or additional text.
diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts
index cac78f1c..18ae8b94 100644
--- a/packages/cli/src/commands/init.ts
+++ b/packages/cli/src/commands/init.ts
@@ -25,7 +25,7 @@ export default class Init extends Command {
 
   static override examples = [
     "<%= config.bin %> <%= command.id %>",
-    "<%= config.bin %> <%= command.id %> /custom/path/ava.env",
+    "<%= config.bin %> <%= command.id %> /custom/path",
   ];
 
   static override flags = {
diff --git a/packages/cli/src/commands/util/process/dir.ts b/packages/cli/src/commands/util/process/dir.ts
index 84106215..d2c0ba6c 100644
--- a/packages/cli/src/commands/util/process/dir.ts
+++ b/packages/cli/src/commands/util/process/dir.ts
@@ -28,7 +28,8 @@ export default class Dir extends Command {
   static override flags = {
     gitIgnore: Flags.string({
       char: "g",
-      description: "use .gitignore file to ignore files and directories",
+      description:
+        "path to .gitignore file to use for ignoring files and directories",
     }),
     ignore: Flags.string({
       char: "i",
diff --git a/packages/llm/package.json b/packages/llm/package.json
index b7dd245e..3611f224 100644
--- a/packages/llm/package.json
+++ b/packages/llm/package.json
@@ -27,7 +27,7 @@
     "@langchain/openai": "^0.2.5"
   },
   "peerDependencies": {
-    "dotenv": "^16.4.3"
+    "dotenv": "workspace:*"
   },
   "devDependencies": {
     "@types/node": "^18"
diff --git a/packages/llm/src/model-manager.ts b/packages/llm/src/model-manager.ts
index dfc707de..7ce38a22 100644
--- a/packages/llm/src/model-manager.ts
+++ b/packages/llm/src/model-manager.ts
@@ -131,7 +131,7 @@ export const googleModel = ({
 };
 // Any OpenAI compatible endpoint should work here, tested with llama.cpp server
 export async function localModel({
-  baseURL = "http://192.168.4.195:8080/v1",
+  baseURL = process.env.LOCAL_OPENAI_BASE_URL || "http://localhost:8080/v1",
   maxTokens = 1024,
   model = "hermes-2-pro-llama-3-8b",
   temperature = 0.5,
@@ -155,7 +155,7 @@ export async function localModel({
 }
 
 export const ollamaModel = async ({
-  baseUrl = "http://127.0.0.1:11434",
+  baseUrl = process.env.OLLAMA_BASE_URL || "http://127.0.0.1:11434",
   model = "llama3.1",
   temperature = 0.1,
 }: {
From d6e19b1376d8098b4d727b8eaa247398937708ac Mon Sep 17 00:00:00 2001
From: Josh Mabry
Date: Mon, 29 Jul 2024 22:23:39 -0700
Subject: [PATCH 08/15] adds config

---
 acai.config.json | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 acai.config.json

diff --git a/acai.config.json b/acai.config.json
new file mode 100644
index 00000000..5d79fb82
--- /dev/null
+++ b/acai.config.json
@@ -0,0 +1,28 @@
+{
+  "modelConfig": {
+    "anthropic": [
+      "claude-3-5-sonnet-20240620",
"claude-3-haiku-20240307", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229" + ], + "google": ["gemini-1.0-pro", "gemini-1.5-flash", "gemini-1.5-pro"], + "groq": [ + "gemma-7b-it", + "gemma2-9b-it", + "llama-3.1-8b-instant", + "llama-3.1-70b-versatile", + "mixtral-8x7b-32768" + ], + "local": ["hermes-2-pro-llama-3-8b"], + "ollama": ["llama3.1"], + "openAI": [ + "gpt-3.5-turbo", + "gpt-4", + "gpt-4-0125-preview", + "gpt-4-turbo", + "gpt-4o", + "gpt-4o-mini" + ] + } +} From 36a35f0a92416fef60e95a2bd96a3bb946a90a20 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 22:24:08 -0700 Subject: [PATCH 09/15] adds model select flag and ux --- packages/cli/src/commands/chat.ts | 51 ++++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/commands/chat.ts b/packages/cli/src/commands/chat.ts index 3f4f488e..497a36dd 100644 --- a/packages/cli/src/commands/chat.ts +++ b/packages/cli/src/commands/chat.ts @@ -5,10 +5,23 @@ import { RunnableWithMessageHistory } from "@langchain/core/runnables"; import { Command, Flags } from "@oclif/core"; import inquirer from "inquirer"; import { config } from "dotenv"; + +const configPath = path.join( + process.env.ACAI_CONFIG_PATH || process.cwd(), + "acai.config.json" +); config({ path: ["~/ava.env"], }); +import fs from "fs"; +import path from "path"; + +function getModelConfig() { + const configFile = fs.readFileSync(configPath, "utf8"); + return JSON.parse(configFile).modelConfig; +} + import { getModel, isAllModel } from "@ai-citizens/llm"; const messageHistories: Record = {}; @@ -25,13 +38,49 @@ export default class Chat extends Command { description: "The model to use", required: false, }), + modelSelect: Flags.boolean({ + description: "Select a model", + required: false, + char: "m", + }), }; static override description = "Interactive chat with the AI assistant"; public async run(): Promise { const { flags } = await this.parse(Chat); - const modelName = flags.model || "gpt-4o-mini"; + let modelName = flags.model || "gpt-4o-mini"; + if (!isAllModel(modelName)) { + this.log( + `------------------------------------------------\n\n Invalid model: ${modelName} \n\n Use the --modelSelect || -m flag to select a model\n\n------------------------------------------------` + ); + } + + if (flags.modelSelect) { + const modelConfig = getModelConfig(); + + // First, select the model provider + const { selectedProvider } = await inquirer.prompt([ + { + type: "list", + name: "selectedProvider", + message: "Select a model provider:", + choices: Object.keys(modelConfig), + }, + ]); + + // Then, select the specific model from the chosen provider + const { selectedModel } = await inquirer.prompt([ + { + type: "list", + name: "selectedModel", + message: `Select a ${selectedProvider} model:`, + choices: modelConfig[selectedProvider], + }, + ]); + + modelName = selectedModel; + } if (!isAllModel(modelName)) { throw new Error(`Invalid model: ${modelName}`); From 6c00998ef296639116e88950dbf75e1df683d112 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 22:24:29 -0700 Subject: [PATCH 10/15] cleanup --- packages/llm/src/model-manager.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/llm/src/model-manager.ts b/packages/llm/src/model-manager.ts index 7ce38a22..f0cc0418 100644 --- a/packages/llm/src/model-manager.ts +++ b/packages/llm/src/model-manager.ts @@ -104,6 +104,7 @@ export const anthropicModel = ({ if (!process.env.ANTHROPIC_API_KEY) { throw new 
Error("ANTHROPIC_API_KEY is not set"); } + return new ChatAnthropic({ maxTokens, model, @@ -163,10 +164,6 @@ export const ollamaModel = async ({ model?: OllamaModel; temperature?: number; }) => { - const response = await fetch(baseUrl + "/models"); - if (!response.ok) { - throw new Error(`Failed to connect to ${baseUrl}`); - } return new ChatOllama({ baseUrl, checkOrPullModel: false, From bf524e36d1b05ffc07b4a70456d10b27cfbc1d14 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 23:21:57 -0700 Subject: [PATCH 11/15] update base class, improve response handling in cla --- packages/cli/package.json | 3 +- packages/cli/src/commands/chat.ts | 78 ++++++++++------------ packages/cli/src/commands/cla.ts | 48 +++++++++++-- packages/cli/src/utils/get-model-config.ts | 23 +++++++ packages/llm/package.json | 2 +- packages/utils/package.json | 4 +- pnpm-lock.yaml | 41 ++++-------- 7 files changed, 117 insertions(+), 82 deletions(-) create mode 100644 packages/cli/src/utils/get-model-config.ts diff --git a/packages/cli/package.json b/packages/cli/package.json index dc1285c2..ff0eda26 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -43,7 +43,8 @@ "@oclif/plugin-plugins": "^5", "clipboardy": "^4.0.0", "dotenv": "^16.4.5", - "inquirer": "^9.2.23" + "inquirer": "^9.2.23", + "fast-xml-parser": "^4.4.0" }, "devDependencies": { "@oclif/dev-cli": "^1.26.10", diff --git a/packages/cli/src/commands/chat.ts b/packages/cli/src/commands/chat.ts index 497a36dd..5c00a64a 100644 --- a/packages/cli/src/commands/chat.ts +++ b/packages/cli/src/commands/chat.ts @@ -4,25 +4,9 @@ import { ChatPromptTemplate } from "@langchain/core/prompts"; import { RunnableWithMessageHistory } from "@langchain/core/runnables"; import { Command, Flags } from "@oclif/core"; import inquirer from "inquirer"; -import { config } from "dotenv"; - -const configPath = path.join( - process.env.ACAI_CONFIG_PATH || process.cwd(), - "acai.config.json" -); -config({ - path: ["~/ava.env"], -}); - -import fs from "fs"; -import path from "path"; - -function getModelConfig() { - const configFile = fs.readFileSync(configPath, "utf8"); - return JSON.parse(configFile).modelConfig; -} - -import { getModel, isAllModel } from "@ai-citizens/llm"; +import { getModelConfig } from "../utils/get-model-config.js"; +import * as llm from "@ai-citizens/llm"; +import { XMLParser } from "fast-xml-parser"; const messageHistories: Record = {}; @@ -46,11 +30,38 @@ export default class Chat extends Command { }; static override description = "Interactive chat with the AI assistant"; + // New method to handle model selection + public async selectModel( + modelConfig: Record + ): Promise { + // First, select the model provider + const { selectedProvider } = await inquirer.prompt([ + { + type: "list", + name: "selectedProvider", + message: "Select a model provider:", + choices: Object.keys(modelConfig), + }, + ]); + + // Then, select the specific model from the chosen provider + const { selectedModel } = await inquirer.prompt([ + { + type: "list", + name: "selectedModel", + message: `Select a ${selectedProvider} model:`, + choices: modelConfig[selectedProvider], + }, + ]); + + return selectedModel; + } + public async run(): Promise { const { flags } = await this.parse(Chat); let modelName = flags.model || "gpt-4o-mini"; - if (!isAllModel(modelName)) { + if (!llm.isAllModel(modelName)) { this.log( `------------------------------------------------\n\n Invalid model: ${modelName} \n\n Use the --modelSelect || -m flag to select a 
model\n\n------------------------------------------------`
diff --git a/packages/cli/src/commands/cla.ts b/packages/cli/src/commands/cla.ts
index c41fcdcd..9daee46a 100644
--- a/packages/cli/src/commands/cla.ts
+++ b/packages/cli/src/commands/cla.ts
@@ -8,14 +8,34 @@ import inquirer from "inquirer";
 import { exec } from "node:child_process";
 import { getModel, isAllModel } from "@ai-citizens/llm";
 import Chat from "./chat.js";
+import { getModelConfig } from "../utils/get-model-config.js";
+import { XMLParser } from "fast-xml-parser";
 
 const messageHistories: Record<string, InMemoryChatMessageHistory> = {};
 const MAX_OUTPUT_LINES = 100;
 
-const systemPrompt = `You are an AI assistant that generates shell commands based on user input.
-Generate only the command itself, without any explanations or additional text.
-If the user asks for something that cannot be done with a single shell command, explain why and suggest alternatives.
-You can refer to previous command outputs when generating new commands.`;
+const systemPrompt = `You are an AI assistant specialized in generating shell commands based on user input. Your task is to interpret the user's request and provide an appropriate shell command or explain why the request cannot be fulfilled with a single command.
+
+Rules for generating commands:
+1. Generate only the command itself, without any explanations or additional text.
+2. Ensure the command is a valid shell command that can be executed in a standard Unix-like environment.
+3. If multiple commands are needed, use appropriate operators to combine them (e.g., &&, ||, |).
+4. Use common shell utilities and avoid assuming the presence of specialized tools unless explicitly mentioned by the user.
+5. Prioritize safety and avoid destructive commands unless explicitly requested.
+
+If the user asks for something that cannot be done with a single shell command:
+1. Explain why it's not possible in a brief sentence.
+2. Suggest alternatives or a series of commands that could achieve the desired result.
+
+You can refer to previous command outputs when generating new commands. If the user's input references a previous output, use the information provided in the {{PREVIOUS_OUTPUT}} variable to inform your command generation.
+
+When responding, provide your output in the following format:
+<explanation>Your explanation or helpful tips go here</explanation>
+<command>Your generated shell command or explanation goes here</command>
+
+If there is any previous command output to consider, it will be provided as well. 
+
+Generate the appropriate shell command or explanation based on the user's input and any relevant previous output.`;
 
 const prompt = ChatPromptTemplate.fromMessages([
   ["system", systemPrompt],
@@ -31,14 +51,20 @@ export default class CLA extends Chat {
   public async run(): Promise<void> {
     const { flags } = await this.parse(CLA);
-    const modelName = flags.model || "gpt-4o-mini";
+    let modelName = flags.model || "gpt-4o-mini";
+
+    if (flags.modelSelect) {
+      modelName = await this.selectModel(getModelConfig());
+    }
 
     if (!isAllModel(modelName)) {
       throw new Error(`Invalid model: ${modelName}`);
     }
+
     const model = await getModel({
       model: modelName,
     });
+
     const parser = new StringOutputParser();
     const chain = prompt.pipe(model).pipe(parser);
@@ -78,12 +104,20 @@ export default class CLA extends Chat {
         : userInput;
 
     try {
-      const generatedCommand = await withMessageHistory.invoke(
+      const llmResponse = await withMessageHistory.invoke(
         { input: fullInput },
         config
       );
 
-      this.log("Generated command:", generatedCommand);
+      const parser = new XMLParser();
+      const xmlDoc = parser.parse(`<root>${llmResponse}</root>`);
+      const explanation = xmlDoc.root.explanation || "";
+      const generatedCommand = xmlDoc.root.command || "";
+
+      if (explanation) {
+        this.log("Explanation:", explanation);
+      }
 
+      this.log("Generated command:", generatedCommand);
       // @ts-ignore
       const { execute } = await inquirer.prompt([
         {
diff --git a/packages/cli/src/utils/get-model-config.ts b/packages/cli/src/utils/get-model-config.ts
new file mode 100644
index 00000000..149e1a41
--- /dev/null
+++ b/packages/cli/src/utils/get-model-config.ts
@@ -0,0 +1,23 @@
+import fs from "fs";
+import path from "path";
+import { config } from "dotenv";
+
+const configPath = path.join(
+  process.env.ACAI_CONFIG_PATH || process.cwd(),
+  "acai.config.json"
+);
+config({
+  path: [`${configPath}/.env`],
+});
+
+const modelConfigCache: Record<string, any> = {};
+
+export function getModelConfig() {
+  if (modelConfigCache[configPath]) {
+    return modelConfigCache[configPath];
+  }
+  const configFile = fs.readFileSync(configPath, "utf8");
+  const modelConfig = JSON.parse(configFile).modelConfig;
+  modelConfigCache[configPath] = modelConfig;
+  return modelConfig;
+}
diff --git a/packages/llm/package.json b/packages/llm/package.json
index 3611f224..16ed93e8 100644
--- a/packages/llm/package.json
+++ b/packages/llm/package.json
@@ -27,7 +27,7 @@
     "@langchain/openai": "^0.2.5"
   },
   "peerDependencies": {
-    "dotenv": "workspace:*"
+    "dotenv": "^16.4.5"
   },
   "devDependencies": {
     "@types/node": "^18"
diff --git a/packages/utils/package.json b/packages/utils/package.json
index 4c7504dc..b1c73809 100644
--- a/packages/utils/package.json
+++ b/packages/utils/package.json
@@ -24,8 +24,8 @@
   "author": "",
   "license": "ISC",
   "peerDependencies": {
-    "typescript": "workspace:*",
-    "@types/node": "workspace:*"
+    "typescript": "^5.5.4",
+    "@types/node": "^18"
   },
   "devDependencies": {}
 }
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index b5f85832..f04e241c 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -18,9 +18,12 @@ importers:
       '@changesets/cli':
         specifier: ^2.27.7
         version: 2.27.7
+      '@types/node':
+        specifier: ^18
+        version: 18.19.42
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.7.3)(@types/node@22.0.0)(typescript@5.5.4)
+        version: 10.9.2(@swc/core@1.7.3)(@types/node@18.19.42)(typescript@5.5.4)
       typescript:
         specifier: ^5.5.4
         version: 5.5.4
@@ -66,6 +69,9 @@ importers:
       dotenv:
         specifier: ^16.4.5
         version: 16.4.5
+      fast-xml-parser:
+        specifier: ^4.4.0
+        version: 4.4.1
       inquirer:
         specifier: ^9.2.23
         version: 9.3.6 
@@ -104,7 +110,7 @@ importers: specifier: ^0.2.5 version: 0.2.5 dotenv: - specifier: ^16.4.3 + specifier: ^16.4.5 version: 16.4.5 devDependencies: '@types/node': @@ -153,13 +159,12 @@ importers: packages/utils: dependencies: - typescript: - specifier: ^5.5.4 - version: 5.5.4 - devDependencies: '@types/node': specifier: ^18 version: 18.19.42 + typescript: + specifier: ^5.5.4 + version: 5.5.4 packages: @@ -3193,6 +3198,7 @@ snapshots: '@types/node@22.0.0': dependencies: undici-types: 6.11.1 + optional: true '@types/prop-types@15.7.12': {} @@ -4729,26 +4735,6 @@ snapshots: optionalDependencies: '@swc/core': 1.7.3 - ts-node@10.9.2(@swc/core@1.7.3)(@types/node@22.0.0)(typescript@5.5.4): - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.11 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 22.0.0 - acorn: 8.12.1 - acorn-walk: 8.3.3 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.5.4 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - optionalDependencies: - '@swc/core': 1.7.3 - tslib@2.6.3: {} tunnel-agent@0.6.0: @@ -4773,7 +4759,8 @@ snapshots: undici-types@5.26.5: {} - undici-types@6.11.1: {} + undici-types@6.11.1: + optional: true universalify@0.1.2: {} From ae8322987ffd52a26e7bb1162b3202e55ad47248 Mon Sep 17 00:00:00 2001 From: Josh Mabry Date: Mon, 29 Jul 2024 23:34:18 -0700 Subject: [PATCH 12/15] update config and readme --- example.env | 4 +++- packages/cli/README.md | 12 +++++++----- packages/cli/src/utils/get-model-config.ts | 14 ++++++++++++-- 3 files changed, 22 insertions(+), 8 deletions(-) diff --git a/example.env b/example.env index 87ff6b3e..4233c568 100644 --- a/example.env +++ b/example.env @@ -6,4 +6,6 @@ TAVILY_API_KEY= ANTHROPIC_API_KEY= GOOGLE_API_KEY= GROQ_API_KEY= -UNSTRUCTURED_API_KEY= \ No newline at end of file +UNSTRUCTURED_API_KEY= +LOCAL_OPENAI_BASE_URL= +OLLAMA_BASE_URL= \ No newline at end of file diff --git a/packages/cli/README.md b/packages/cli/README.md index 2442ae18..860a5030 100644 --- a/packages/cli/README.md +++ b/packages/cli/README.md @@ -17,21 +17,23 @@ This is the `oclif` CLI for the [Open CLI Framework](https://github.com/oclif/core), that supports the development of oclif plugins and CLIs. - # 🚀 Getting Started Tutorial -The [Getting Started tutorial](http://oclif.io/docs/introduction) is a step-by-step guide to introduce you to oclif. If you have not developed anything in a command line before, this tutorial is a great place to get started. +Install the CLI globally: -See [Usage](#-usage) below for an overview of the `oclif` CLI. +`npm i -g @ai-citizens/ava` + +Run `ava --help` to see what commands are available. # 📌 Requirements Currently, Node 18+ is supported. We support the [LTS versions](https://nodejs.org/en/about/releases) of Node. You can add the [node](https://www.npmjs.com/package/node) package to your CLI to ensure users are running a specific version of Node. 
-# 📌 Migrating from V1
+

## Breaking Changes

diff --git a/packages/cli/src/utils/get-model-config.ts b/packages/cli/src/utils/get-model-config.ts
index 149e1a41..74053701 100644
--- a/packages/cli/src/utils/get-model-config.ts
+++ b/packages/cli/src/utils/get-model-config.ts
@@ -18,6 +18,16 @@ export function getModelConfig() {
   }
   const configFile = fs.readFileSync(configPath, "utf8");
   const modelConfig = JSON.parse(configFile).modelConfig;
-  modelConfigCache[configPath] = modelConfig;
-  return modelConfig;
+
+  // Filter out empty arrays from the modelConfig
+  const filteredModelConfig = Object.entries(modelConfig).reduce<
+    Record<string, unknown>
+  >((acc, [key, value]) => {
+    if (!Array.isArray(value) || value.length > 0) {
+      acc[key] = value;
+    }
+    return acc;
+  }, {});
+  modelConfigCache[configPath] = filteredModelConfig;
+  return filteredModelConfig;
 }
From b34e75723eb5dcef74a4637c57837f2fbbc6238c Mon Sep 17 00:00:00 2001
From: Josh Mabry
Date: Tue, 30 Jul 2024 21:20:55 -0700
Subject: [PATCH 13/15] remove check from local model

---
 packages/llm/src/model-manager.ts | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/packages/llm/src/model-manager.ts b/packages/llm/src/model-manager.ts
index f0cc0418..bf3954bc 100644
--- a/packages/llm/src/model-manager.ts
+++ b/packages/llm/src/model-manager.ts
@@ -142,11 +142,6 @@ export async function localModel({
   model?: string;
   temperature?: number;
 }) {
-  // check if the url returns anything
-  const response = await fetch(baseURL + "/models");
-  if (!response.ok) {
-    throw new Error(`Failed to connect to ${baseURL}`);
-  }
   return new ChatOpenAI({
     configuration: { baseURL },
     maxTokens,
     model,
     temperature,
   });
 }
From a9272b1df2185fdc42bdfa9a25d7abd5f14f1e17 Mon Sep 17 00:00:00 2001
From: Josh Mabry
Date: Tue, 30 Jul 2024 21:30:55 -0700
Subject: [PATCH 14/15] pass dir name to generated file

---
 packages/utils/src/process/convert-dir-to-text-file.ts | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/packages/utils/src/process/convert-dir-to-text-file.ts b/packages/utils/src/process/convert-dir-to-text-file.ts
index b1f8d699..30aabc38 100644
--- a/packages/utils/src/process/convert-dir-to-text-file.ts
+++ b/packages/utils/src/process/convert-dir-to-text-file.ts
@@ -83,14 +83,15 @@ async function convertDirToTextFile(
 
   // Generate directory structure
   const dirStructure = await generateDirStructure(dirPath);
-  output = `${path.basename(dirPath)}/\n${dirStructure}\n${output}`;
+  const dirName = path.basename(dirPath);
+  output = `${dirName}/\n${dirStructure}\n${output}`;
 
   await processDirectory(dirPath);
 
   if (outputPath) {
-    await fs.writeFile(`${outputPath}/converted-dir-output.txt`, output);
+    const outputFileName = `${dirName}-converted-dir-output.txt`;
+    await fs.writeFile(path.join(outputPath, outputFileName), output);
   }
-
   return output;
 }
From 78aa8ed10eaec6f97d20abc12a6d4bca9719efae Mon Sep 17 00:00:00 2001
From: Josh Mabry
Date: Tue, 30 Jul 2024 21:56:52 -0700
Subject: [PATCH 15/15] cleanup and bump

---
 .changeset/mighty-numbers-switch.md |  7 +++++++
 .gitignore                          |  2 ++
 acai.config.json => ava.config.json |  0
 example.env => example.ava.env      |  0
 packages/cli/src/commands/chat.ts   |  1 -
 packages/cli/src/commands/init.ts   | 20 ++++++++++----------
 6 files changed, 19 insertions(+), 11 deletions(-)
 create mode 100644 .changeset/mighty-numbers-switch.md
 rename acai.config.json => ava.config.json (100%)
 rename example.env => example.ava.env (100%)

diff --git a/.changeset/mighty-numbers-switch.md b/.changeset/mighty-numbers-switch.md
new file mode 100644
index 00000000..09250d27 
--- /dev/null
+++ b/.changeset/mighty-numbers-switch.md
@@ -0,0 +1,7 @@
+---
+"@ai-citizens/utils": patch
+"@ai-citizens/ava": patch
+"@ai-citizens/llm": patch
+---
+
+adds utils, additional model handling improvements
diff --git a/.gitignore b/.gitignore
index de515019..a2d3d204 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,3 +37,5 @@ coverage
 .tmp
 _docpress
 tsconfig.tsbuildinfo
+.env
+ava.env
\ No newline at end of file
diff --git a/acai.config.json b/ava.config.json
similarity index 100%
rename from acai.config.json
rename to ava.config.json
diff --git a/example.env b/example.ava.env
similarity index 100%
rename from example.env
rename to example.ava.env
diff --git a/packages/cli/src/commands/chat.ts b/packages/cli/src/commands/chat.ts
index 5c00a64a..6c9ed5ea 100644
--- a/packages/cli/src/commands/chat.ts
+++ b/packages/cli/src/commands/chat.ts
@@ -6,7 +6,6 @@ import { Command, Flags } from "@oclif/core";
 import inquirer from "inquirer";
 import { getModelConfig } from "../utils/get-model-config.js";
 import * as llm from "@ai-citizens/llm";
-import { XMLParser } from "fast-xml-parser";
 
 const messageHistories: Record<string, InMemoryChatMessageHistory> = {};
 
diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts
index 18ae8b94..3936e4b7 100644
--- a/packages/cli/src/commands/init.ts
+++ b/packages/cli/src/commands/init.ts
@@ -1,7 +1,5 @@
 import { Args, Command, Flags } from "@oclif/core";
 import * as fs from "fs";
-import * as path from "path";
-import * as os from "os";
 import inquirer from "inquirer";
 
 enum ApiKeys {
@@ -11,6 +9,8 @@ enum ApiKeys {
   GOOGLE_API_KEY = "GOOGLE_API_KEY",
   GROQ_API_KEY = "GROQ_API_KEY",
   UNSTRUCTURED_API_KEY = "UNSTRUCTURED_API_KEY",
+  LOCAL_OPENAI_BASE_URL = "LOCAL_OPENAI_BASE_URL",
+  OLLAMA_BASE_URL = "OLLAMA_BASE_URL",
 }
 
 export default class Init extends Command {
@@ -71,22 +71,22 @@ export default class Init extends Command {
 
   public async run(): Promise<void> {
     const { args, flags } = await this.parse(Init);
+    const currentDir = process.cwd();
+    const configPath = args.configPath || currentDir;
 
-    const configPath = args.configPath || path.join(os.homedir(), "ava.env");
+    let env = this.readExistingConfig(configPath + "/ava.env");
 
-    let config = this.readExistingConfig(configPath);
-
-    if (Object.keys(config).length > 0 && !flags.force) {
+    if (Object.keys(env).length > 0 && !flags.force) {
       this.log(
         `Existing config file found at ${configPath}. Updating with missing keys.`
       );
     } else {
-      config = {};
+      env = {};
       this.log(`Creating new config file at ${configPath}.`);
     }
 
     try {
-      const updatedConfig = await this.promptForMissingKeys(config);
+      const updatedConfig = await this.promptForMissingKeys(env);
 
       const configContent = Object.entries(updatedConfig)
         .map(([key, value]) => `${key}=${value}`)
 
       fs.writeFileSync(configPath, configContent);
       this.log(
         `Ava config file ${
-          Object.keys(config).length > 0 ? "updated" : "created"
+          Object.keys(env).length > 0 ? "updated" : "created"
         } at: ${configPath}`
       );
     } catch (error) {
       this.error(
         `Failed to ${
-          Object.keys(config).length > 0 ? "update" : "create"
+          Object.keys(env).length > 0 ? "update" : "create"
         } config file: ${error}`
       );
     }
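Taken together, the utils package from PATCH 03 plus the output-naming change in PATCH 14 give the following end-to-end usage. A sketch, assuming the package is built and resolvable and the call runs in an ESM module where top-level await is available:

```ts
import { convertDirToTextFile } from "@ai-citizens/utils/process";

// Converts ./packages/utils into a single text file; with the PATCH 14
// change the file is written as ./utils-converted-dir-output.txt rather
// than the generic converted-dir-output.txt.
const text = await convertDirToTextFile("./packages/utils", {
  ignore: ["node_modules", "dist"],
  outputPath: ".",
});
console.log(text.split("\n")[0]); // "utils/" — the tree header line
```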