diff --git a/frontend/src/components/Chatbot.js b/frontend/src/components/Chatbot.js
index 331af5e..ecf3f92 100644
--- a/frontend/src/components/Chatbot.js
+++ b/frontend/src/components/Chatbot.js
@@ -17,7 +17,7 @@ function Chatbot() {
-
+
diff --git a/frontend/src/interface.js b/frontend/src/interface.js
index 2c9f6e0..21a6ed3 100644
--- a/frontend/src/interface.js
+++ b/frontend/src/interface.js
@@ -67,6 +67,19 @@ export async function sendMessage() {
queryButton.disabled = false;
}
+// Concatenate the most relevant file contents with the user input and send the combined prompt to the ChatGPT API
+export async function sendFiles(files, userInput) {
+ let request = "";
+
+ for (let i = 0; i < files.length; i++) {
+ request = request.concat(files[i] + "\n");
+ }
+
+ request = request.concat(userInput);
+
+ await fetchChatGPTResponse(request);
+}
+
// Create a query from the user input that will be used to find the most relevant files
export async function sendCodebaseQuery() {
const userInput = document.getElementById("user-input").value;
@@ -157,5 +170,6 @@ export async function fetchPineconeResponse(userInput) {
appendMessage("Error", botMessage.error);
} else {
appendMessage("Assistant", botMessage.text);
+ sendFiles(botMessage.files, userInput);
}
}
diff --git a/server/config/pineconeConfig/embeddingConfig.js b/server/config/pineconeConfig/embeddingConfig.js
deleted file mode 100644
index 7b3f281..0000000
--- a/server/config/pineconeConfig/embeddingConfig.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const openai = require('../openaiConfig')
-require('dotenv').config();
-
-// Function to generate embeddings using OpenAI's API
-async function generateEmbeddings(tokens, fileType) {
- try {
- // Flatten the tokens into a format suitable for OpenAI
-
- let text = "placeholder";
-
- if (fileType == "JSON") {
- text = tokens.map(token => JSON.stringify(token)).join('\n');
- }
- else if (fileType == "string") {
- text = tokens;
- }
-
- // Request embeddings
- const response = await openai.embeddings.create({
- model: 'text-embedding-ada-002', // Use an appropriate embedding model
- input: text,
- encoding_format: 'float'
- });
-
- console.log('Embedding Dimension: ', response.data[0].embedding.length);
- console.log('OpenAI embeddings response:', response.data);
- return response.data;
-
-
- } catch (error) {
- console.error('Error generating embeddings with OpenAI:', error);
- }
-}
-
-module.exports = generateEmbeddings;
diff --git a/server/config/pineconeConfig/pineconeManager.js b/server/config/pineconeConfig/pineconeManager.js
index 395fbc0..3fc7333 100644
--- a/server/config/pineconeConfig/pineconeManager.js
+++ b/server/config/pineconeConfig/pineconeManager.js
@@ -26,6 +26,16 @@ class PineconeManager {
this.index = this.pc.index(indexName);
}
+ /**
+ * Creates a delay for a specified amount of time.
+ *
+ * @param {number} ms - The delay time in milliseconds.
+ * @returns {Promise} A promise that resolves after the specified time.
+ */
+ delay(ms) {
+ return new Promise(resolve => setTimeout(resolve, ms));
+ }
+
/**
* Initializes the Pinecone index with the specified configuration.
* Creates the index on the Pinecone server if it does not already exist.
@@ -45,57 +55,70 @@ class PineconeManager {
},
},
});
+
this.index = this.pc.index(this.indexName); // Reinitialize the index after creation
+ await this.delay(3000); // 3 second delay
}
/**
* Upserts embeddings into the specified namespace of the Pinecone index.
*
* @async
- * @param {number[]} embeddings - The embeddings vector to upsert.
- * @param {string} [namespace="SampleCode"] - The namespace in the index to upsert to.
- * @param {string} [id="SampleCode"] - The unique ID for the vector.
+ * @param {Object} data - Object with `functions` and `classes` arrays; each entry may carry an `embedding` vector and a `filepath`.
+ * @param {string} [namespace="codebase"] - The namespace in the index to upsert to.
* @returns {Promise} A promise that resolves once the embeddings are upserted.
*/
- async upsertEmbeddings(embeddings, namespace, id) {
- await this.index.namespace(namespace).upsert([
- {
- id: id,
- values: embeddings,
- },
- ]);
- }
+ async upsertEmbeddings(data, namespace = "codebase") {
+ // Prepare the upsert request payload
+ const upsertPayload = [];
- /**
- * Retrieves and logs the statistics of the Pinecone index.
- *
- * @async
- * @returns {Promise} A promise that resolves once the index stats are logged.
- */
- async checkIndex() {
- const stats = await this.index.describeIndexStats();
- console.log(stats);
+ // Handle functions
+ data.functions.forEach((func) => {
+ if (func.embedding && Array.isArray(func.embedding)) {
+ upsertPayload.push({
+ id: func.function_name,
+ values: func.embedding,
+ metadata: { filepath: func.filepath, type: 'function' }
+ });
+ }
+ });
+
+ // Handle classes
+ data.classes.forEach((cls) => {
+ if (cls.embedding && Array.isArray(cls.embedding)) {
+ upsertPayload.push({
+ id: cls.class_name,
+ values: cls.embedding,
+ metadata: { filepath: cls.filepath, type: 'class' }
+ });
+ }
+ });
+
+ // Upsert the data into Pinecone
+ await this.index.namespace(namespace).upsert(upsertPayload);
+ await this.delay(3000); // 3 second delay
+ console.log('Embeddings upserted successfully.');
}
/**
- * Performs a similarity search within the specified namespace of the Pinecone index.
- * Logs the search results to the console.
+ * Queries the Pinecone index using the provided embedding.
*
* @async
- * @param {number[]} vector - The query vector for the similarity search.
- * @param {number} [topK=3] - The number of top results to return.
- * @param {string} [namespace="ns1"] - The namespace in the index to search within.
- * @param {boolean} [includeValues=true] - Whether to include vector values in the results.
- * @returns {Promise} A promise that resolves once the search results are logged.
- * @returns {JSON} A data structure giving the top k results.
+ * @param {Array} embedding - The embedding vector to query with.
+ * @param {string} [namespace="samplecode"] - The namespace to query.
+ * @param {number} [topK=5] - The number of top results to return.
+ * @returns {Promise