Skip to content

Commit e02cfdc

Browse files
committed
adds init command, model manager
1 parent dc7afc5 commit e02cfdc

File tree

11 files changed

+211
-50
lines changed

11 files changed

+211
-50
lines changed

.github/workflows/changeset.yml

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
# Version-and-publish workflow driven by Changesets.
# On every push to main: either opens/updates a "chore: update versions" PR,
# or (when version PRs are merged) publishes the packages to npm.
name: Changesets
on:
  push:
    branches:
      - main
env:
  CI: true
  PNPM_CACHE_FOLDER: .pnpm-store
jobs:
  version:
    timeout-minutes: 15
    runs-on: ubuntu-latest
    steps:
      - name: checkout code repository
        uses: actions/checkout@v3
        with:
          # Full history so changesets can diff against previous tags.
          fetch-depth: 0
      - name: setup node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: install pnpm
        run: npm i pnpm@latest -g
      - name: setup pnpm config
        run: pnpm config set store-dir $PNPM_CACHE_FOLDER
      - name: install dependencies
        # NOTE: do not pipe through `grep -v "^npm notice"` here — pnpm does
        # not emit npm's "notice" lines, and under the runner's default
        # `bash -e -o pipefail` a grep that matches nothing exits 1 and
        # fails the step even when the install succeeded.
        run: pnpm install
      # Sanity-check secrets BEFORE the publish step so a missing token is
      # diagnosed up front instead of after a failed publish.
      - name: Debug NPM_TOKEN
        run: |
          if [ -n "$NPM_TOKEN" ]; then
            echo "NPM_TOKEN is set"
          else
            echo "NPM_TOKEN is not set"
          fi
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: create and publish versions
        uses: changesets/action@v1
        with:
          version: pnpm ci:version
          commit: "chore: update versions"
          title: "chore: update versions"
          publish: pnpm ci:publish --no-git-checks
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

.npmrc

Lines changed: 0 additions & 1 deletion
This file was deleted.

package.json

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
"name": "acai-monorepo",
33
"version": "0.0.0",
44
"description": "",
5-
"main": "index.js",
65
"scripts": {
76
"ci:publish": "pnpm publish -r --no-git-checks"
87
},
@@ -11,7 +10,6 @@
1110
"license": "ISC",
1211
"devDependencies": {
1312
"@changesets/cli": "^2.27.7",
14-
"dotenv-cli": "^7.4.2",
1513
"ts-node": "^10.9.2",
1614
"typescript": "^5.5.4"
1715
},

packages/cli/package.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
2-
"name": "@acai/ava",
3-
"version": "0.0.0",
2+
"name": "@artificialcitizens/ava",
3+
"version": "0.0.1",
44
"description": "",
55
"main": "index.js",
66
"type": "module",
@@ -20,7 +20,7 @@
2020
"topicSeparator": " "
2121
},
2222
"dependencies": {
23-
"@acai/llm": "workspace:*",
23+
"@artificialcitizens/llm": "workspace:*",
2424
"@langchain/anthropic": "^0.2.10",
2525
"@langchain/google-genai": "^0.0.23",
2626
"@langchain/groq": "^0.0.15",

packages/cli/src/commands/agent.ts

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,13 @@ import { ChatPromptTemplate } from "@langchain/core/prompts";
55
import { RunnableWithMessageHistory } from "@langchain/core/runnables";
66
import { Command } from "@oclif/core";
77
import clipboardy from "clipboardy";
8-
import "dotenv/config";
98
import inquirer from "inquirer";
109
import { exec } from "node:child_process";
11-
12-
import { getModel } from "@acai/llm";
13-
10+
import { getModel } from "@artificialcitizens/llm";
11+
import { config } from "dotenv";
12+
config({
13+
path: ["~/ava.env"],
14+
});
1415
const messageHistories: Record<string, InMemoryChatMessageHistory> = {};
1516
const MAX_OUTPUT_LINES = 100; // Adjust this value as needed
1617

packages/cli/src/commands/init.ts

Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,109 @@
1+
import { Args, Command, Flags } from "@oclif/core";
2+
import * as fs from "fs";
3+
import * as path from "path";
4+
import * as os from "os";
5+
import inquirer from "inquirer";
6+
7+
enum ApiKeys {
8+
OPENAI_API_KEY = "OPENAI_API_KEY",
9+
TAVILY_API_KEY = "TAVILY_API_KEY",
10+
ANTHROPIC_API_KEY = "ANTHROPIC_API_KEY",
11+
GOOGLE_API_KEY = "GOOGLE_API_KEY",
12+
GROQ_API_KEY = "GROQ_API_KEY",
13+
UNSTRUCTURED_API_KEY = "UNSTRUCTURED_API_KEY",
14+
}
15+
16+
export default class Init extends Command {
17+
static override args = {
18+
configPath: Args.string({
19+
description: "Optional path for the config file",
20+
required: false,
21+
}),
22+
};
23+
24+
static override description = "Initialize Ava configuration";
25+
26+
static override examples = [
27+
"<%= config.bin %> <%= command.id %>",
28+
"<%= config.bin %> <%= command.id %> /custom/path/ava.env",
29+
];
30+
31+
static override flags = {
32+
force: Flags.boolean({
33+
char: "f",
34+
description: "Overwrite existing config file",
35+
}),
36+
};
37+
38+
private readExistingConfig(configPath: string): Record<string, string> {
39+
const config: Record<string, string> = {};
40+
if (fs.existsSync(configPath)) {
41+
const fileContent = fs.readFileSync(configPath, "utf-8");
42+
fileContent.split("\n").forEach((line) => {
43+
const [key, value] = line.split("=");
44+
if (key && value) {
45+
config[key.trim()] = value.trim();
46+
}
47+
});
48+
}
49+
return config;
50+
}
51+
52+
private async promptForMissingKeys(
53+
config: Record<string, string>
54+
): Promise<Record<string, string>> {
55+
for (const key of Object.values(ApiKeys)) {
56+
if (!config[key]) {
57+
const { value } = await inquirer.prompt([
58+
{
59+
type: "input",
60+
name: "value",
61+
message: `Enter your ${key} (press Enter to skip):`,
62+
},
63+
]);
64+
if (value.trim() !== "") {
65+
config[key] = value.trim();
66+
}
67+
}
68+
}
69+
return config;
70+
}
71+
72+
public async run(): Promise<void> {
73+
const { args, flags } = await this.parse(Init);
74+
75+
const configPath = args.configPath || path.join(os.homedir(), "ava.env");
76+
77+
let config = this.readExistingConfig(configPath);
78+
79+
if (Object.keys(config).length > 0 && !flags.force) {
80+
this.log(
81+
`Existing config file found at ${configPath}. Updating with missing keys.`
82+
);
83+
} else {
84+
config = {};
85+
this.log(`Creating new config file at ${configPath}.`);
86+
}
87+
88+
try {
89+
const updatedConfig = await this.promptForMissingKeys(config);
90+
91+
const configContent = Object.entries(updatedConfig)
92+
.map(([key, value]) => `${key}=${value}`)
93+
.join("\n");
94+
95+
fs.writeFileSync(configPath, configContent);
96+
this.log(
97+
`Ava config file ${
98+
Object.keys(config).length > 0 ? "updated" : "created"
99+
} at: ${configPath}`
100+
);
101+
} catch (error) {
102+
this.error(
103+
`Failed to ${
104+
Object.keys(config).length > 0 ? "update" : "create"
105+
} config file: ${error}`
106+
);
107+
}
108+
}
109+
}

packages/llm/package.json

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
2-
"name": "@acai/llm",
3-
"version": "0.0.0",
2+
"name": "@artificialcitizens/llm",
3+
"version": "0.0.1",
44
"description": "",
55
"type": "module",
66
"main": "dist/index.js",
@@ -17,5 +17,11 @@
1717
"@langchain/groq": "^0.0.15",
1818
"@langchain/ollama": "^0.0.2",
1919
"@langchain/openai": "^0.2.5"
20+
},
21+
"peerDependencies": {
22+
"dotenv": "^16.4.3"
23+
},
24+
"devDependencies": {
25+
"@types/node": "^18"
2026
}
2127
}

packages/llm/src/index.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
export { getModel } from "./model-manager.js";
1+
export * from "./model-manager.js";

packages/llm/src/model-manager.ts

Lines changed: 24 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
33
import { ChatGroq } from "@langchain/groq";
44
import { ChatOllama } from "@langchain/ollama";
55
import { ChatOpenAI } from "@langchain/openai";
6-
import "dotenv/config";
76

87
const modelConfig = {
98
anthropic: [
@@ -45,7 +44,7 @@ export type Model =
4544
| OllamaModel
4645
| OpenAIModel;
4746

48-
const openAiModel = ({
47+
export const openAiModel = ({
4948
baseUrl = "https://api.openai.com/v1",
5049
maxTokens = 1024,
5150
model = "gpt-3.5-turbo",
@@ -63,27 +62,37 @@ const openAiModel = ({
6362
temperature,
6463
});
6564

66-
const groqModel = ({
65+
export const groqModel = ({
6766
maxTokens = 1024,
6867
model = "mixtral-8x7b-32768",
6968
temperature = 0.5,
7069
}: {
7170
maxTokens?: number;
7271
model?: GroqModel;
7372
temperature?: number;
74-
}) => new ChatGroq({ maxTokens, model, temperature });
73+
}) =>
74+
new ChatGroq({
75+
maxTokens,
76+
model,
77+
temperature,
78+
});
7579

76-
const anthropicModel = ({
80+
export const anthropicModel = ({
7781
maxTokens = 1024,
7882
model = "claude-3-sonnet-20240229",
7983
temperature = 0.5,
8084
}: {
8185
maxTokens?: number;
8286
model?: AnthropicModel;
8387
temperature?: number;
84-
}) => new ChatAnthropic({ maxTokens, model, temperature });
88+
}) =>
89+
new ChatAnthropic({
90+
maxTokens,
91+
model,
92+
temperature,
93+
});
8594

86-
const googleModel = ({
95+
export const googleModel = ({
8796
maxTokens = 1024,
8897
model = "gemini-1.5-pro",
8998
temperature = 0.5,
@@ -98,7 +107,7 @@ const googleModel = ({
98107
temperature,
99108
});
100109
// Any OpenAI compatible endpoint should work here, tested with llama.cpp server
101-
const localModel = ({
110+
export const localModel = ({
102111
baseURL = "http://localhost:8080/v1",
103112
maxTokens = 1024,
104113
model = "hermes-2-pro-llama-3-8b",
@@ -109,9 +118,14 @@ const localModel = ({
109118
model?: string;
110119
temperature?: number;
111120
}) =>
112-
new ChatOpenAI({ configuration: { baseURL }, maxTokens, model, temperature });
121+
new ChatOpenAI({
122+
configuration: { baseURL },
123+
maxTokens,
124+
model,
125+
temperature,
126+
});
113127

114-
const ollamaModel = ({
128+
export const ollamaModel = ({
115129
baseUrl = "http://localhost:11434",
116130
model = "llama3.1",
117131
temperature = 0.1,
@@ -167,4 +181,3 @@ export const getModel = ({
167181

168182
throw new Error(`Unsupported model: ${model}`);
169183
};
170-

packages/shared/package.json

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,5 +11,8 @@
1111
"license": "ISC",
1212
"dependencies": {
1313
"@langchain/core": "^0.2.18"
14+
},
15+
"peerDependencies": {
16+
"dotenv": "^16.4.3"
1417
}
1518
}

0 commit comments

Comments
 (0)