Skip to content

Commit

Permalink
Merge pull request #8 from mabry1985/feature/cli-utils
Browse files Browse the repository at this point in the history
cli updates
  • Loading branch information
mabry1985 authored Jul 31, 2024
2 parents 01588bc + 78aa8ed commit 3d74e14
Show file tree
Hide file tree
Showing 25 changed files with 584 additions and 163 deletions.
7 changes: 7 additions & 0 deletions .changeset/mighty-numbers-switch.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"@ai-citizens/utils": patch
"@ai-citizens/ava": patch
"@ai-citizens/llm": patch
---

adds utils, additional model handling improvements
39 changes: 36 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,41 @@
dist/
node_modules/
.DS_Store
.env
.changeset/README.md
.changeset/*.tmp.md
oclif.manifest.json
.pnpm-store/
.pnpm-store/
.npmrc

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

# Dependency directory
**/node_modules/**
_node_modules
.pnp.cjs
*.local

# Coverage directory used by tools like istanbul
coverage

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
.tmp
_docpress
tsconfig.tsbuildinfo
.env
ava.env
1 change: 0 additions & 1 deletion .npmrc

This file was deleted.

28 changes: 28 additions & 0 deletions ava.config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
{
"modelConfig": {
"anthropic": [
"claude-3-5-sonnet-20240620",
"claude-3-haiku-20240307",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229"
],
"google": ["gemini-1.0-pro", "gemini-1.5-flash", "gemini-1.5-pro"],
"groq": [
"gemma-7b-it",
"gemma2-9b-it",
"llama-3.1-8b-instant",
"llama-3.1-70b-versatile",
"mixtral-8x7b-32768"
],
"local": ["hermes-2-pro-llama-3-8b"],
"ollama": ["llama3.1"],
"openAI": [
"gpt-3.5-turbo",
"gpt-4",
"gpt-4-0125-preview",
"gpt-4-turbo",
"gpt-4o",
"gpt-4o-mini"
]
}
}
4 changes: 3 additions & 1 deletion example.env → example.ava.env
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,6 @@ TAVILY_API_KEY=
ANTHROPIC_API_KEY=
GOOGLE_API_KEY=
GROQ_API_KEY=
UNSTRUCTURED_API_KEY=
UNSTRUCTURED_API_KEY=
LOCAL_OPENAI_BASE_URL=
OLLAMA_BASE_URL=
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
"devDependencies": {
"@changesets/cli": "^2.27.7",
"ts-node": "^10.9.2",
"typescript": "^5.5.4"
"typescript": "^5.5.4",
"@types/node": "^18"
},
"dependencies": {
"@langchain/core": "^0.2.18",
Expand Down
89 changes: 27 additions & 62 deletions packages/cli/README.md
Original file line number Diff line number Diff line change
@@ -1,44 +1,39 @@
<img src="https://user-images.githubusercontent.com/449385/38243295-e0a47d58-372e-11e8-9bc0-8c02a6f4d2ac.png" width="260" height="73">

# `oclif` CLI

[![Version](https://img.shields.io/npm/v/oclif.svg)](https://npmjs.org/package/oclif)
<!-- [![Version](https://img.shields.io/npm/v/oclif.svg)](https://npmjs.org/package/oclif)
[![Downloads/week](https://img.shields.io/npm/dw/oclif.svg)](https://npmjs.org/package/oclif/oclif)
[![License](https://img.shields.io/npm/l/oclif.svg)](https://github.com/oclif/oclif/blob/main/package.json)
[![License](https://img.shields.io/npm/l/oclif.svg)](https://github.com/oclif/oclif/blob/main/package.json) -->

<!-- toc -->
* [`oclif` CLI](#oclif-cli)
* [🗒 Description](#-description)
* [🚀 Getting Started Tutorial](#-getting-started-tutorial)
* [📌 Requirements](#-requirements)
* [📌 Migrating from V1](#-migrating-from-v1)
* [🏗 Usage](#-usage)
* [📚 Examples](#-examples)
* [🔨 Commands](#-commands)
* [🚀 Contributing](#-contributing)
* [🏭 Related Repositories](#-related-repositories)
* [🦔 Learn More](#-learn-more)

- [🗒 Description](#-description)
- [🔨 Commands](#-commands)
- [🚀 Contributing](#-contributing)
- [🏭 Related Repositories](#-related-repositories)
- [🦔 Learn More](#-learn-more)
<!-- tocstop -->

# 🗒 Description

This is the `oclif` CLI for the [Open CLI Framework](https://github.com/oclif/core), that supports the development of oclif plugins and CLIs.

[See the docs for more information](http://oclif.io).
<!-- [See the docs for more information](http://oclif.io). -->

# 🚀 Getting Started Tutorial

The [Getting Started tutorial](http://oclif.io/docs/introduction) is a step-by-step guide to introduce you to oclif. If you have not developed anything in a command line before, this tutorial is a great place to get started.
Install the CLI globally:

`npm i -g @ai-citizens/ava`

See [Usage](#-usage) below for an overview of the `oclif` CLI.
Run `ava --help` to see what commands are available.

# 📌 Requirements

Currently, Node 18+ is supported. We support the [LTS versions](https://nodejs.org/en/about/releases) of Node. You can add the [node](https://www.npmjs.com/package/node) package to your CLI to ensure users are running a specific version of Node.

# 📌 Migrating from V1
<!-- # 📌 Migrating from V1
If you have been using version 1 of the [`oclif` CLI](https://github.com/oclif/oclif/tree/v1.18.4) there are some important differences to note when using the latest version.
If you have been using version 1 of the [`oclif` CLI](https://github.com/oclif/oclif/tree/v1.18.4) there are some important differences to note when using the latest version. -->

## Breaking Changes

Expand Down Expand Up @@ -88,67 +83,37 @@ hello world! (./src/commands/hello/world.ts)

- [Hello-World](https://github.com/oclif/hello-world)
- [Salesforce CLI](https://github.com/salesforcecli/cli)
- [Heroku CLI](https://github.com/heroku/cli)
- [Heroku CLI](https://github.com/heroku/cli) -->

# 🔨 Commands

<!-- commands -->
* [`ava chat`](#ava-chat)
* [`ava cla`](#ava-cla)
* [`ava init [CONFIGPATH]`](#ava-init-configpath)

## `ava chat`

Interactive chat with the AI assistant

```
USAGE
$ ava chat [--model <value>]
FLAGS
--model=<value> The model to use
DESCRIPTION
Interactive chat with the AI assistant
```

## `ava cla`

Interactive AI agent to generate and execute commands based on natural language input

```
USAGE
$ ava cla [--model <value>]

FLAGS
--model=<value> The model to use
DESCRIPTION
Interactive AI agent to generate and execute commands based on natural language input
```
- [`ava util process dir [INPUTDIR]`](#ava-util-process-dir-inputdir)

## `ava init [CONFIGPATH]`
## `ava util process dir [INPUTDIR]`

Initialize Ava configuration
Converts a directory of files to a text file

```
USAGE
$ ava init [CONFIGPATH] [-f]
$ ava util process dir [INPUTDIR] [-g <value>] [-i <value>] [-o <value>]
ARGUMENTS
CONFIGPATH Optional path for the config file
INPUTDIR input directory to convert to text file
FLAGS
-f, --force Overwrite existing config file
-g, --gitIgnore=<value> use .gitignore file to ignore files and directories
-i, --ignore=<value> ignore files and directories using comma separated string
-o, --outputFile=<value> output file to write to
DESCRIPTION
Initialize Ava configuration
Converts a directory of files to a text file
EXAMPLES
$ ava init
$ ava init /custom/path/ava.env
$ ava util process dir
```

<!-- commandsstop -->

# 🚀 Contributing
Expand Down
4 changes: 3 additions & 1 deletion packages/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
},
"dependencies": {
"@ai-citizens/llm": "workspace:*",
"@ai-citizens/utils": "workspace:*",
"@langchain/anthropic": "^0.2.10",
"@langchain/google-genai": "^0.0.23",
"@langchain/groq": "^0.0.15",
Expand All @@ -42,7 +43,8 @@
"@oclif/plugin-plugins": "^5",
"clipboardy": "^4.0.0",
"dotenv": "^16.4.5",
"inquirer": "^9.2.23"
"inquirer": "^9.2.23",
"fast-xml-parser": "^4.4.0"
},
"devDependencies": {
"@oclif/dev-cli": "^1.26.10",
Expand Down
58 changes: 48 additions & 10 deletions packages/cli/src/commands/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,8 @@ import { ChatPromptTemplate } from "@langchain/core/prompts";
import { RunnableWithMessageHistory } from "@langchain/core/runnables";
import { Command, Flags } from "@oclif/core";
import inquirer from "inquirer";
import { config } from "dotenv";
config({
path: ["~/ava.env"],
});

import { getModel, isAllModel } from "@ai-citizens/llm";
import { getModelConfig } from "../utils/get-model-config.js";
import * as llm from "@ai-citizens/llm";

const messageHistories: Record<string, InMemoryChatMessageHistory> = {};

Expand All @@ -25,19 +21,61 @@ export default class Chat extends Command {
description: "The model to use",
required: false,
}),
modelSelect: Flags.boolean({
description: "Select a model",
required: false,
char: "m",
}),
};
static override description = "Interactive chat with the AI assistant";

/**
 * Interactively prompts the user to choose a model in two steps:
 * first a provider (a key of `modelConfig`), then one of that
 * provider's model names.
 *
 * @param modelConfig map of provider name -> list of model identifiers
 * @returns the model identifier the user selected
 */
public async selectModel(
  modelConfig: Record<string, string[]>
): Promise<string> {
  // Step 1: pick the provider from the config's keys.
  const providerAnswer = await inquirer.prompt([
    {
      choices: Object.keys(modelConfig),
      message: "Select a model provider:",
      name: "selectedProvider",
      type: "list",
    },
  ]);
  const provider = providerAnswer.selectedProvider;

  // Step 2: pick a specific model offered by that provider.
  const modelAnswer = await inquirer.prompt([
    {
      choices: modelConfig[provider],
      message: `Select a ${provider} model:`,
      name: "selectedModel",
      type: "list",
    },
  ]);

  return modelAnswer.selectedModel;
}

public async run(): Promise<void> {
const { args, flags } = await this.parse(Chat);
const { flags } = await this.parse(Chat);

const modelName = flags.model || "gpt-4o-mini";
let modelName = flags.model || "gpt-4o-mini";
if (!llm.isAllModel(modelName)) {
this.log(
`------------------------------------------------\n\n Invalid model: ${modelName} \n\n Use the --modelSelect || -m flag to select a model\n\n------------------------------------------------`
);
}

if (flags.modelSelect) {
const modelConfig = getModelConfig();
modelName = await this.selectModel(modelConfig);
}

if (!isAllModel(modelName)) {
if (!llm.isAllModel(modelName)) {
throw new Error(`Invalid model: ${modelName}`);
}

const model = getModel({ model: modelName });
const model = await llm.getModel({ model: modelName });

const parser = new StringOutputParser();
const chain = prompt.pipe(model);
Expand Down
Loading

0 comments on commit 3d74e14

Please sign in to comment.