Skip to content

Commit

Permalink
Merge pull request #12 from mabry1985/moar-updates
Browse files Browse the repository at this point in the history
Moar updates
  • Loading branch information
mabry1985 authored Aug 3, 2024
2 parents 40e43d3 + b230327 commit 47b6885
Show file tree
Hide file tree
Showing 71 changed files with 4,475 additions and 1,164 deletions.
10 changes: 10 additions & 0 deletions .changeset/young-sloths-draw.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
---
"@ai-citizens/prompts": patch
"@ai-citizens/graph": patch
"@ai-citizens/tools": patch
"@ai-citizens/utils": patch
"@ai-citizens/ava": patch
"@ai-citizens/llm": patch
---

Additional CLI improvements, new graphs, and functionality
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,5 @@ coverage
_docpress
tsconfig.tsbuildinfo
.env
ava.env
ava.env
langgraphjs
76 changes: 52 additions & 24 deletions ava.config.json
Original file line number Diff line number Diff line change
@@ -1,28 +1,56 @@
{
"modelConfig": {
"anthropic": [
"claude-3-5-sonnet-20240620",
"claude-3-haiku-20240307",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229"
],
"google": ["gemini-1.0-pro", "gemini-1.5-flash", "gemini-1.5-pro"],
"groq": [
"gemma-7b-it",
"gemma2-9b-it",
"llama-3.1-8b-instant",
"llama-3.1-70b-versatile",
"mixtral-8x7b-32768"
],
"local": ["hermes-2-pro-llama-3-8b"],
"ollama": ["llama3.1"],
"openAI": [
"gpt-3.5-turbo",
"gpt-4",
"gpt-4-0125-preview",
"gpt-4-turbo",
"gpt-4o",
"gpt-4o-mini"
]
"anthropic": {
"defaultModel": "claude-3-5-sonnet-20240620",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"claude-3-5-sonnet-20240620",
"claude-3-haiku-20240307",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229"
]
},
"google": {
"defaultModel": "gemini-1.5-pro",
"temperature": 0.5,
"maxTokens": 8192,
"models": ["gemini-1.0-pro", "gemini-1.5-flash", "gemini-1.5-pro"]
},
"openAI": {
"defaultModel": "gpt-4o",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"gpt-3.5-turbo",
"gpt-4",
"gpt-4-0125-preview",
"gpt-4-turbo",
"gpt-4o",
"gpt-4o-mini"
]
},
"groq": {
"defaultModel": "llama-3.1-8b-instant",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"llama-3.1-8b-instant",
"llama-3.1-70b-versatile",
"mixtral-8x7b-32768"
]
},
"ollama": {
"defaultModel": "llama3.1",
"temperature": 0.5,
"maxTokens": 8192,
"models": ["llama3.1"]
},
"local": {
"defaultModel": "hermes-2-pro-llama-3-8b",
"temperature": 0.5,
"maxTokens": 8192,
"models": ["hermes-2-pro-llama-3-8b"]
}
}
}
13 changes: 9 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
"version": "0.0.0",
"description": "",
"scripts": {
"build": "pnpm -r run build",
"clean": "pnpm -r run clean",
"build": "pnpm run clean && pnpm -r run build",
"ci:publish": "pnpm publish --access public -r --filter \"@ai-citizens/*\" --no-git-checks",
"ci:version": "changeset version",
"publish": "pnpm publish -r --filter \"@ai-citizens/*\" --no-git-checks",
Expand All @@ -16,12 +17,16 @@
"license": "ISC",
"devDependencies": {
"@changesets/cli": "^2.27.7",
"@types/node": "^18",
"rimraf": "^6.0.1",
"ts-node": "^10.9.2",
"typescript": "^5.5.4",
"@types/node": "^18"
"typescript": "^5.5.4"
},
"dependencies": {
"@langchain/core": "^0.2.18",
"@ai-citizens/llm": "workspace:*",
"@ai-citizens/prompts": "workspace:*",
"@ai-citizens/tools": "workspace:*",
"@ai-citizens/utils": "workspace:*",
"dotenv": "^16.4.5"
}
}
64 changes: 64 additions & 0 deletions packages/cli/ava.config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
{
"modelConfig": {
"anthropic": {
"defaultModel": "claude-3-5-sonnet-20240620",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"claude-3-5-sonnet-20240620",
"claude-3-haiku-20240307",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229"
]
},
"google": {
"defaultModel": "gemini-1.5-pro",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"gemini-1.0-pro",
"gemini-1.5-flash",
"gemini-1.5-pro"
]
},
"openAI": {
"defaultModel": "gpt-4o",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"gpt-3.5-turbo",
"gpt-4",
"gpt-4-0125-preview",
"gpt-4-turbo",
"gpt-4o",
"gpt-4o-mini"
]
},
"groq": {
"defaultModel": "llama-3.1-8b-instant",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"llama-3.1-8b-instant",
"llama-3.1-70b-versatile",
"mixtral-8x7b-32768"
]
},
"ollama": {
"defaultModel": "llama3.1",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"llama3.1"
]
},
"local": {
"defaultModel": "hermes-2-pro-llama-3-8b",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"hermes-2-pro-llama-3-8b"
]
}
}
}
17 changes: 7 additions & 10 deletions packages/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -31,27 +31,24 @@
"topicSeparator": " "
},
"dependencies": {
"@ai-citizens/graph": "workspace:*",
"@ai-citizens/llm": "workspace:*",
"@ai-citizens/prompts": "workspace:*",
"@ai-citizens/tools": "workspace:*",
"@ai-citizens/utils": "workspace:*",
"@langchain/anthropic": "^0.2.10",
"@langchain/google-genai": "^0.0.23",
"@langchain/groq": "^0.0.15",
"@langchain/ollama": "^0.0.2",
"@langchain/openai": "^0.2.5",
"@oclif/core": "^4.0.17",
"@oclif/plugin-help": "^6",
"@oclif/plugin-plugins": "^5",
"clipboardy": "^4.0.0",
"dotenv": "^16.4.5",
"inquirer": "^9.2.23",
"fast-xml-parser": "^4.4.0"
"fast-xml-parser": "^4.4.0",
"inquirer": "^9.2.23"
},
"devDependencies": {
"@oclif/dev-cli": "^1.26.10",
"@types/inquirer": "^9.0.7",
"@types/node": "^18",
"ts-node": "^10.9.2",
"shx": "^0.3.3"
"shx": "^0.3.3",
"ts-node": "^10.9.2"
},
"peerDependencies": {
"@langchain/core": "^0.2.18"
Expand Down
2 changes: 1 addition & 1 deletion packages/cli/src/commands/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import * as llm from "@ai-citizens/llm";
import { config } from "dotenv";

config({
path: [`${process.env.AVA_CONFIG_PATH}/ava.env`, process.cwd() + "/ava.env"],
path: [`${process.env.AVA_CONFIG_PATH}/.env`, process.cwd() + "/.env"],
});
const messageHistories: Record<string, InMemoryChatMessageHistory> = {};

Expand Down
70 changes: 69 additions & 1 deletion packages/cli/src/commands/init.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,63 @@ import { Args, Command, Flags } from "@oclif/core";
import * as fs from "fs";
import inquirer from "inquirer";

const defaultConfig = `{
"modelConfig": {
"anthropic": {
"defaultModel": "claude-3-5-sonnet-20240620",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"claude-3-5-sonnet-20240620",
"claude-3-haiku-20240307",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229"
]
},
"google": {
"defaultModel": "gemini-1.5-pro",
"temperature": 0.5,
"maxTokens": 8192,
"models": ["gemini-1.0-pro", "gemini-1.5-flash", "gemini-1.5-pro"]
},
"openAI": {
"defaultModel": "gpt-4o",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"gpt-3.5-turbo",
"gpt-4",
"gpt-4-0125-preview",
"gpt-4-turbo",
"gpt-4o",
"gpt-4o-mini"
]
},
"groq": {
"defaultModel": "llama-3.1-8b-instant",
"temperature": 0.5,
"maxTokens": 8192,
"models": [
"llama-3.1-8b-instant",
"llama-3.1-70b-versatile",
"mixtral-8x7b-32768"
]
},
"ollama": {
"defaultModel": "llama3.1",
"temperature": 0.5,
"maxTokens": 8192,
"models": ["llama3.1"]
},
"local": {
"defaultModel": "hermes-2-pro-llama-3-8b",
"temperature": 0.5,
"maxTokens": 8192,
"models": ["hermes-2-pro-llama-3-8b"]
}
}
}`;

enum ApiKeys {
OPENAI_API_KEY = "OPENAI_API_KEY",
TAVILY_API_KEY = "TAVILY_API_KEY",
Expand Down Expand Up @@ -34,6 +91,10 @@ export default class Init extends Command {
char: "f",
description: "Overwrite existing config file",
}),
config: Flags.boolean({
char: "c",
description: "Create a new config file",
}),
};

private readExistingConfig(configPath: string): Record<string, string> {
Expand Down Expand Up @@ -75,7 +136,14 @@ export default class Init extends Command {
const { args, flags } = await this.parse(Init);
const currentDir = process.cwd();
const configPath = args.configPath || currentDir;
const envPath = configPath + "/ava.env";
const envPath = configPath + "/.env";

if (flags.config) {
const configObject = JSON.parse(defaultConfig);
const formattedConfig = JSON.stringify(configObject, null, 2);
fs.writeFileSync(configPath + "/ava.config.json", formattedConfig);
}

let env = this.readExistingConfig(envPath);

if (Object.keys(env).length > 0 && !flags.force) {
Expand Down
38 changes: 38 additions & 0 deletions packages/cli/src/commands/test/graph.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import { Args, Command, Flags } from "@oclif/core";
import { processYouTubeVideo, runGraphGenerator } from "@ai-citizens/graph";

/**
 * Smoke-test command for the @ai-citizens/graph package.
 *
 * Runs one of the available test graphs based on the positional `type`
 * argument:
 *   - "youtube" (or no argument): runs `processYouTubeVideo` and logs the
 *     parsed result.
 *   - "graph": runs `runGraphGenerator` with a fixed prompt and thread id
 *     and logs the generated graph.
 */
export default class TestGraph extends Command {
  static override args = {
    type: Args.string({ description: "type of graph to run" }),
  };

  static override description =
    "Run a test graph: 'youtube' processes a YouTube video, 'graph' runs the graph generator";

  static override examples = [
    "<%= config.bin %> <%= command.id %> youtube",
    "<%= config.bin %> <%= command.id %> graph",
  ];

  static override flags = {
    // flag with no value (-f, --force); currently unused by run(), kept for
    // CLI compatibility
    force: Flags.boolean({ char: "f" }),
  };

  public async run(): Promise<void> {
    const { args } = await this.parse(TestGraph);

    // Default behavior: with no type argument, run the YouTube processor.
    if (args.type === "youtube" || !args.type) {
      const parsedVideo = await processYouTubeVideo();
      console.log(parsedVideo);
    }

    if (args.type === "graph") {
      // thread_id keys the graph's checkpointed conversation state; a fixed
      // value is fine for a one-off test run.
      const parsedGraph = await runGraphGenerator(
        "generate a graph for a chatbot",
        {
          configurable: {
            thread_id: "123",
          },
        }
      );
      console.log(parsedGraph);
    }
  }
}
7 changes: 6 additions & 1 deletion packages/cli/src/commands/util/process/dir.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,10 @@ export default class Dir extends Command {
char: "o",
description: "output file to write to",
}),
writeToCWD: Flags.boolean({
char: "w",
description: "write to current working directory",
}),
};

public async run(): Promise<void> {
Expand All @@ -48,7 +52,7 @@ export default class Dir extends Command {
this.error("Input directory is required");
}

if (!flags.outputFile) {
if (!flags.outputFile && !flags.writeToCWD) {
this.error("Output file is required");
}

Expand All @@ -61,6 +65,7 @@ export default class Dir extends Command {
await convertDirToTextFile(args.inputDir, {
ignore,
outputPath: flags.outputFile,
writeToCWD: flags.writeToCWD,
});
}
}
Expand Down
Loading

0 comments on commit 47b6885

Please sign in to comment.