Skip to content

Commit

Permalink
Merge pull request #61 from jucasoliveira/implement-chatGPT-api
Browse files Browse the repository at this point in the history
Implement ChatGPT API
  • Loading branch information
jucasoliveira authored Mar 6, 2023
2 parents c339135 + a14d533 commit 90cac44
Show file tree
Hide file tree
Showing 9 changed files with 27 additions and 186 deletions.
13 changes: 2 additions & 11 deletions bin/context.js
Original file line number Diff line number Diff line change
@@ -1,19 +1,10 @@
//const fs = require("fs");

//const contextFile = `${__dirname}/../data/context-terminal-gpt.txt`;
let context = "";
let context = [];

const addContext = (text) => {
context = `${context}\n ${text}`;
/*
if (!fs.existsSync(contextFile)) {
fs.createWriteStream(contextFile).on("open", function (fd) {
fs.appendFileSync(fd, text + "\n");
});
} else {
fs.appendFileSync(contextFile, text + "\n");
}
*/
context = [...context, text];
};

const getContext = () => {
Expand Down
67 changes: 0 additions & 67 deletions bin/file.js

This file was deleted.

55 changes: 0 additions & 55 deletions bin/fineTune.js

This file was deleted.

49 changes: 12 additions & 37 deletions bin/gpt.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,45 +2,31 @@ const { Configuration, OpenAIApi } = require("openai");
const chalk = require("chalk");
const { loadWithRocketGradient } = require("./gradient");
const { getContext, addContext } = require("./context");
const { appendToFile, uploadFile } = require("./file");
const { fineTune, getFineTuneModel, setFineTuneModel } = require("./fineTune");

let converstationLimit = 0;

const checkModel = (options) => {
return getFineTuneModel() || options.engine || "text-davinci-002";
};

const generateCompletion = async (apiKey, model, prompt, options) => {
const generateCompletion = async (apiKey, prompt) => {
try {
let innerContext = getContext();
const tgptModel = `${model}-terminal-gpt`;
const file = `${__dirname}/../data/${tgptModel}.jsonl`;

const configuration = new Configuration({
apiKey,
});

const openai = new OpenAIApi(configuration);
const spinner = loadWithRocketGradient("Thinking...").start();
addContext(`${prompt}\n`);

addContext({"role": "user", "content": prompt});
addContext({"role": "system", "content": "Read the context, when returning the answer, always wrapping block of code exactly within triple backticks"});

const request = await openai
.createCompletion({
model: checkModel(options),
prompt: `Read the context, analyze and return an answer for the prompt, always wrapping block of code exactly within triple backticks.\nContext:${innerContext}\nPrompt:${prompt}\n`,
max_tokens: 2048,
temperature: parseInt(options.temperature) || 0.7,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
})
const request = await openai.createChatCompletion({
model:"gpt-3.5-turbo",
messages:getContext(),
})
.then((res) => {
addContext(`${res.data.choices[0].text}`);
addContext(res.data.choices[0].message);
spinner.stop();
return res;
return res.data.choices[0].message;
})
.catch((err) => {
checkModel(options);
if (err["response"]["status"] == "404") {
console.error(
`${chalk.red(
Expand Down Expand Up @@ -83,27 +69,16 @@ const generateCompletion = async (apiKey, model, prompt, options) => {
return "error";
});

if (request == undefined || !request.data?.choices?.[0].text) {
if (request == undefined || !request?.content) {
throw new Error("Something went wrong!");
}

if (options.finetunning) {
converstationLimit = converstationLimit + 1;
appendToFile(file, prompt, request.data.choices[0].text);
if (converstationLimit === parseInt(options.limit)) {
const uploadedFile = await uploadFile(apiKey, file);
const fineTuning = await fineTune(apiKey, uploadedFile.id);
setFineTuneModel(fineTuning.fine_tuned_model);
addContext("");
}
}
return request;
} catch (error) {
console.error(`${chalk.red("Something went wrong!!")} ${error}`);
}
};

module.exports = {
appendToFile,
generateCompletion,
};
4 changes: 2 additions & 2 deletions bin/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ commander
)
.option("-l,--limit <limit>", "The limit of prompts to train the model with")
.usage(`"<project-directory>" [options]`)
.action(async (options) => {
.action(async () => {
intro();
apiKeyPrompt().then((apiKey) => {
const prompt = async () => {
Expand All @@ -43,7 +43,7 @@ commander
case "clear":
return process.stdout.write("\x1Bc");
default:
generateResponse(apiKey, prompt, options, response);
generateResponse(apiKey, prompt, response);
return;
}
};
Expand Down
1 change: 0 additions & 1 deletion bin/intro.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
const gradient = require("gradient-string");
const chalk = require("chalk");
const marked = require("marked");

const intro = () => {
const duck = ` ${gradient("orange", "yellow").multiline(
Expand Down
10 changes: 4 additions & 6 deletions bin/utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,21 +50,19 @@ const checkBlockOfCode = async (text, prompt) => {
}
};

const generateResponse = async (apiKey, prompt, options, response) => {
const generateResponse = async (apiKey, prompt, response) => {
try {
const request = await generateCompletion(
apiKey,
options.engine || "text-davinci-002",
response.value,
options
response.value
);

if (request == undefined || !request.data?.choices?.[0].text) {
if (request == undefined || !request?.content) {
throw new Error("Something went wrong!");
}

// map all choices to text
const getText = request.data.choices.map((choice) => choice.text);
const getText = [request.content];

console.log(`${chalk.cyan("GPT-3: ")}`);
// console log each character of the text with a delay and then call prompt when it finishes
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "terminalgpt",
"version": "1.0.12",
"version": "1.5.0",
"main": "index.js",
"description": "Get GPT like chatGPT on your terminal",
"scripts": {
Expand Down Expand Up @@ -34,7 +34,7 @@
"lowdb": "^5.0.5",
"markdown": "^0.5.0",
"marked": "^4.2.12",
"openai": "^3.1.0",
"openai": "^3.2.0",
"ora": "^5.4.1",
"prompts": "^2.4.2"
},
Expand Down
10 changes: 5 additions & 5 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -1015,7 +1015,7 @@ cli-spinners@^2.5.0:
resolved "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.7.0.tgz"
integrity sha512-qu3pN8Y3qHNgE2AFweciB1IfMnmZ/fsNTEE+NOFjmGB2F/7rLhnhzppvpCnN4FovtP26k8lHyy9ptEbNwWFLzw==

"clipboardy@ 2.3.0 ":
clipboardy@2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/clipboardy/-/clipboardy-2.3.0.tgz#3c2903650c68e46a91b388985bc2774287dba290"
integrity sha512-mKhiIL2DrQIsuXMgBgnfEHOZOryC7kY7YO//TN6c63wlEm3NG5tz+YgY5rVi29KCmq/QQjKYvM7a19+MDOTHOQ==
Expand Down Expand Up @@ -2395,10 +2395,10 @@ onetime@^5.1.0, onetime@^5.1.2:
dependencies:
mimic-fn "^2.1.0"

openai@^3.1.0:
version "3.1.0"
resolved "https://registry.npmjs.org/openai/-/openai-3.1.0.tgz"
integrity sha512-v5kKFH5o+8ld+t0arudj833Mgm3GcgBnbyN9946bj6u7bvel4Yg6YFz2A4HLIYDzmMjIo0s6vSG9x73kOwvdCg==
openai@^3.2.0:
version "3.2.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-3.2.1.tgz#1fa35bdf979cbde8453b43f2dd3a7d401ee40866"
integrity sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==
dependencies:
axios "^0.26.0"
form-data "^4.0.0"
Expand Down

0 comments on commit 90cac44

Please sign in to comment.