Merge pull request #82 from jucasoliveira/80-npm-g-install-terminalgpt-failed-on-mac

feature/new fix for terminalGPT
jucasoliveira authored Nov 13, 2023
2 parents 1e0ff1a + aa1842f commit e484a82
Showing 7 changed files with 3,773 additions and 3,654 deletions.
26 changes: 13 additions & 13 deletions package.json
@@ -1,15 +1,15 @@
 {
   "name": "terminalgpt",
-  "version": "1.6.9",
+  "version": "1.7.3",
   "main": "lib/index.js",
   "description": "Get GPT like chatGPT on your terminal",
   "scripts": {
     "tgpt": "node lib/index.js",
     "test": "jest --ci --coverage --verbose",
     "dev": "ts-node src/index.ts chat --engine gpt-4 --temperature 0.7",
-    "dev:markdown": "ts-node src/index.js chat --engine gpt-4 --temperature 0.7 --markdown",
-    "tunne": "ts-node src/index.js chat --engine gpt-4 --temperature 0.7 --finetunning true --limit 1",
-    "dev:delete": "ts-node src/index.js delete",
+    "dev:markdown": "ts-node src/index.ts chat --engine gpt-4 --temperature 0.7 --markdown",
+    "tunne": "ts-node src/index.ts chat --engine gpt-4 --temperature 0.7 --finetunning true --limit 1",
+    "dev:delete": "ts-node src/index.ts delete",
     "postinstall": "tsc"
   },
   "homepage": "https://github.com/jucasoliveira/terminalGPT#readme",
@@ -32,24 +32,24 @@
     }
   },
   "dependencies": {
+    "@types/gradient-string": "^1.1.5",
+    "@types/marked": "^6.0.0",
+    "@types/marked-terminal": "^6.0.1",
+    "@types/node": "^16.0.0",
+    "@types/prompts": "^2.4.8",
     "chalk": "^4.1.2",
     "clipboardy": "2.3.0",
     "commander": "^9.5.0",
     "compromise": "^14.8.1",
     "gradient-string": "^2.0.2",
     "lowdb": "^5.1.0",
     "markdown": "^0.5.0",
-    "marked": "^4.3.0",
-    "marked-terminal": "^5.2.0",
-    "openai": "^3.2.0",
+    "marked": "^9.1.6",
+    "marked-terminal": "^6.0.0",
+    "openai": "^4.17.4",
     "ora": "^5.4.1",
     "prompts": "^2.4.2",
-    "typesync": "^0.11.1",
-    "@types/node": "^16.0.0",
-    "@types/prompts": "^2.0.10",
-    "@types/gradient-string": "^1.1.2",
-    "@types/marked": "^4.3.1",
-    "@types/marked-terminal": "^3.1.3"
+    "typesync": "^0.11.1"
   },
   "devDependencies": {
     "@types/chai": "^4.3.5",
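Note the three major-version jumps here: marked 4→9, marked-terminal 5→6, and openai 3→4. The openai call sites are migrated in src/gpt.ts below; for the marked pair, marked-terminal v6 attaches as a marked extension rather than a custom renderer passed to setOptions. A minimal sketch of that v6 wiring — the markedTerminal export is per marked-terminal's v6 docs and is an assumption here, since this diff does not show the rendering code:

```ts
import { marked } from "marked";
// Assumed marked-terminal v6 API: a markedTerminal() extension for marked.use().
import { markedTerminal } from "marked-terminal";

marked.use(markedTerminal());

// marked v9 parses synchronously by default, so this prints ANSI-styled output.
console.log(marked.parse("# terminalGPT\n\n*Rendered* in the terminal."));
```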
11 changes: 5 additions & 6 deletions src/context.ts
@@ -1,13 +1,12 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 //const fs = require("fs");
 
 //const contextFile = `${__dirname}/../data/context-terminal-gpt.txt`;
 
-import {ChatCompletionRequestMessage} from "openai";
-let context: ChatCompletionRequestMessage[] = [];
+let context: any[] = [];
 
-export function addContext(text: ChatCompletionRequestMessage) {
-    context = [...context, text];
+export function addContext(text: any) {
+  context = [...context, text];
 }
 
-export const getContext = () => context
+export const getContext = () => context;
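The dropped import is forced by the openai bump: v4 no longer exports ChatCompletionRequestMessage, so this commit falls back to any[]. If you would rather keep the array typed, v4 ships an equivalent message type; a sketch under that assumption (ChatCompletionMessageParam from openai's chat-completions module, present in openai ^4.17):

```ts
// Typed variant of src/context.ts — a sketch, not what this commit does.
import type { ChatCompletionMessageParam } from "openai/resources/chat/completions";

let context: ChatCompletionMessageParam[] = [];

export function addContext(message: ChatCompletionMessageParam) {
  // Rebuild the array rather than push, matching the original immutable style.
  context = [...context, message];
}

export const getContext = () => context;
```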
152 changes: 74 additions & 78 deletions src/gpt.ts
@@ -1,89 +1,85 @@
 import chalk from "chalk";
 
-import {Configuration, OpenAIApi} from "openai";
+import OpenAI from "openai";
 
-import {addContext, getContext} from "./context";
+import { addContext, getContext } from "./context";
 
-import {loadWithRocketGradient} from "./gradient";
+import { loadWithRocketGradient } from "./gradient";
 
 export default async (
-    apiKey: string | Promise<string>,
-    prompt: string,
-    opts: {
-        engine: string;
-        temperature: unknown;
-    },
-    url: string | undefined
+  apiKey: string | Promise<string>,
+  prompt: string,
+  opts: {
+    engine: string;
+    temperature: unknown;
+  }
 ) => {
-    const configuration = new Configuration({
-        apiKey,
-        basePath: url
-    });
-
-    const openai = new OpenAIApi(configuration);
-    const spinner = loadWithRocketGradient("Thinking...").start();
+  const apiKeyValue = await apiKey;
+  const openai = new OpenAI({ apiKey: apiKeyValue });
+  const spinner = loadWithRocketGradient("Thinking...").start();
 
-    addContext({
-        "role": "system",
-        "content": "Read the context, when returning the answer , always wrapping block of code exactly within triple backticks "
-    });
-    addContext({"role": "user", "content": prompt});
+  addContext({
+    role: "system",
+    content:
+      "Read the context, when returning the answer , always wrapping block of code exactly within triple backticks ",
+  });
+  addContext({ role: "user", content: prompt });
 
-    const request = await openai.createChatCompletion({
-        model: opts.engine || "gpt-3.5-turbo",
-        messages: getContext(),
-        temperature: opts.temperature ? Number(opts.temperature) : 1
-    })
-        .then((res) => {
-            if (typeof res.data.choices[0].message !== 'undefined') {
-                addContext(res.data.choices[0].message);
-                spinner.stop();
-                return res.data.choices[0].message
-            } else {
-                throw new Error("Undefined messages received")
-            }
-        })
-        .catch((err) => {
-            spinner.stop();
-            switch (err["response"]["status"]) {
-                case 404:
-                    throw new Error(
-                        `${chalk.red(
-                            "Not Found: Model not found. Please check the model name."
-                        )}`
-                    );
-                case 429:
-                    throw new Error(
-                        `${chalk.red(
-                            "API Rate Limit Exceeded: ChatGPT is getting too many requests from the user in a short period of time. Please wait a while before sending another message."
-                        )}`
-                    )
-                case 400:
-                    throw new Error(
-                        `${chalk.red(
-                            "Bad Request: Prompt provided is empty or too long. Prompt should be between 1 and 4096 tokens."
-                        )}`
-                    );
-                case 402:
-                    throw new Error(
-                        `${chalk.red(
-                            "Payment Required: ChatGPT quota exceeded. Please check you chatGPT account."
-                        )}`
-                    );
-                case 503:
-                    throw new Error(
-                        `${chalk.red(
-                            "Service Unavailable: ChatGPT is currently unavailable, possibly due to maintenance or high traffic. Please try again later."
-                        )}`
-                    );
-                default:
-                    throw new Error(`${err}`);
-            }
-        })
-    if (request === undefined || !request?.content) {
-        throw new Error("Undefined request or content");
-    }
+  const request = await openai.chat.completions
+    .create({
+      model: opts.engine || "gpt-4-1106-preview",
+      messages: getContext(),
+      temperature: opts.temperature ? Number(opts.temperature) : 1,
+    })
+    .then((res) => {
+      if (typeof res.choices[0].message !== "undefined") {
+        addContext(res.choices[0].message);
+        spinner.stop();
+        return res.choices[0].message;
+      } else {
+        throw new Error("Undefined messages received");
+      }
+    })
+    .catch((err) => {
+      spinner.stop();
+      switch (err["response"]["status"]) {
+        case 404:
+          throw new Error(
+            `${chalk.red(
+              "Not Found: Model not found. Please check the model name."
+            )}`
+          );
+        case 429:
+          throw new Error(
+            `${chalk.red(
+              "API Rate Limit Exceeded: ChatGPT is getting too many requests from the user in a short period of time. Please wait a while before sending another message."
+            )}`
+          );
+        case 400:
+          throw new Error(
+            `${chalk.red(
+              "Bad Request: Prompt provided is empty or too long. Prompt should be between 1 and 4096 tokens."
+            )}`
+          );
+        case 402:
+          throw new Error(
+            `${chalk.red(
+              "Payment Required: ChatGPT quota exceeded. Please check you chatGPT account."
+            )}`
+          );
+        case 503:
+          throw new Error(
+            `${chalk.red(
+              "Service Unavailable: ChatGPT is currently unavailable, possibly due to maintenance or high traffic. Please try again later."
+            )}`
+          );
+        default:
+          throw new Error(`${err}`);
+      }
+    });
+  if (request === undefined || !request?.content) {
+    throw new Error("Undefined request or content");
+  }
 
-    return request
-}
+  return request;
+};
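Stripped of the reformatting noise, this file is the standard openai-node v3→v4 migration: Configuration/OpenAIApi collapse into one OpenAI client, createChatCompletion becomes openai.chat.completions.create, and responses lose the axios data wrapper (res.choices, not res.data.choices). Two behavioral changes ride along: the url/basePath parameter is removed rather than ported, and the default model moves from gpt-3.5-turbo to gpt-4-1106-preview. A condensed sketch of the same migration (key handling and model choice are illustrative):

```ts
import OpenAI from "openai";

// v3, as removed above:
//   const openai = new OpenAIApi(new Configuration({ apiKey, basePath: url }));
//   const res = await openai.createChatCompletion({ model, messages });
//   const message = res.data.choices[0].message;

// v4, as introduced by this commit:
async function ask(apiKey: string, content: string) {
  const openai = new OpenAI({
    apiKey, // v4 takes the key directly; no Configuration object
    // baseURL: url, // v4's name for the dropped basePath option, if still needed
  });
  const res = await openai.chat.completions.create({
    model: "gpt-4-1106-preview",
    messages: [{ role: "user", content }],
  });
  return res.choices[0].message; // no .data wrapper in v4
}
```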
66 changes: 33 additions & 33 deletions src/gradient.ts
@@ -2,17 +2,17 @@
 import chalk from "chalk";
 
 import ora from "ora";
 
-const gradientColors = [
-    `#ff5e00`,
-    `#ff4c29`,
-    `#ff383f`,
-    `#ff2453`,
-    `#ff0565`,
-    `#ff007b`,
-    `#f5008b`,
-    `#e6149c`,
-    `#d629ae`,
-    `#c238bd`,
+const gradientColors: string[] = [
+  `#ff5e00`,
+  `#ff4c29`,
+  `#ff383f`,
+  `#ff2453`,
+  `#ff0565`,
+  `#ff007b`,
+  `#f5008b`,
+  `#e6149c`,
+  `#d629ae`,
+  `#c238bd`,
 ];
 
 export const rocketAscii = "■■▶";
@@ -21,26 +21,26 @@ export const rocketAscii = "■■▶";
 // visual representation of what this generates:
 // gradientColors: "..xxXX"
 // referenceGradient: "..xxXXXXxx....xxXX"
-const referenceGradient = [
-    ...gradientColors,
-    // draw the reverse of the gradient without
-    // accidentally mutating the gradient (ugh, reverse())
-    ...[...gradientColors].reverse(),
-    ...gradientColors,
+const referenceGradient: string[] = [
+  ...gradientColors,
+  // draw the reverse of the gradient without
+  // accidentally mutating the gradient (ugh, reverse())
+  ...[...gradientColors].reverse(),
+  ...gradientColors,
 ];
 
 export function getGradientAnimFrames() {
-    const frames = [];
-    for (let start = 0; start < gradientColors.length * 2; start++) {
-        const end = start + gradientColors.length - 1;
-        frames.push(
-            referenceGradient
-                .slice(start, end)
-                .map((g) => chalk.bgHex(g)(" "))
-                .join("")
-        );
-    }
-    return frames;
+  const frames: string[] = [];
+  for (let start = 0; start < gradientColors.length * 2; start++) {
+    const end = start + gradientColors.length - 1;
+    frames.push(
+      referenceGradient
+        .slice(start, end)
+        .map((g) => chalk.bgHex(g as string)(" "))
+        .join("")
+    );
+  }
+  return frames;
 }
 
 // function sleep(time: number) {
@@ -64,16 +64,16 @@ export function getGradientAnimFrames() {
 // return frames;
 // }
 
-
 /**
  * Generate loading spinner with rocket flames!
 * @param text display text next to rocket
 * @returns Ora spinner for running .stop()
 */
-export const loadWithRocketGradient = (text: string) => ora({
+export const loadWithRocketGradient = (text: string) =>
+  ora({
     spinner: {
-        interval: 80,
-        frames: getGradientAnimFrames(),
+      interval: 80,
+      frames: getGradientAnimFrames(),
    },
    text: `${rocketAscii} ${text}`,
-})
+  });
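The gradient changes are purely cosmetic — explicit string[] annotations, a `g as string` cast, and a Prettier pass — so runtime behavior is unchanged. For reference, the intended call pattern, mirroring how gpt.ts above uses it (someAsyncWork is a placeholder):

```ts
import { loadWithRocketGradient } from "./gradient";

// loadWithRocketGradient returns an Ora instance that is not yet started,
// so the caller controls when the rocket-flame animation begins and ends.
const spinner = loadWithRocketGradient("Thinking...").start();
try {
  await someAsyncWork(); // placeholder for the chat-completion request
} finally {
  spinner.stop(); // gpt.ts stops it on both success and error paths
}
```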