diff --git a/README.md b/README.md
index 3246ec8..9b3e0e3 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,21 @@
-# Alita Code
+# ELITEA Code
-Introducing Alita Code for VSCode, the ultimate AI-powered IDE extension that revolutionizes the way you develop, test, and maintain your code. Alita Code harnesses the power of generative AI to provide intelligent suggestions, streamline code implementation, and automate essential tasks, elevating your coding experience to new heights. With customizable internal and external prompts, Alita Code offers an unparalleled level of adaptability, catering to your unique project needs and preferences.
+Introducing ELITEA Code for VSCode, the ultimate AI-powered IDE extension that revolutionizes the way you develop, test, and maintain your code. ELITEA Code harnesses the power of generative AI to provide intelligent suggestions, streamline code implementation, and automate essential tasks, elevating your coding experience to new heights. With customizable internal and external prompts, ELITEA Code offers an unparalleled level of adaptability, catering to your unique project needs and preferences.
-# Why Alita Code?
+# Why ELITEA Code?
## Boost productivity with AI-powered suggestions
-Alita Code intelligently analyzes your code and provides real-time suggestions for implementing features, enhancing code readability, and optimizing performance. Save time and effort while crafting high-quality code.
+ELITEA Code intelligently analyzes your code and provides real-time suggestions for implementing features, enhancing code readability, and optimizing performance. Save time and effort while crafting high-quality code.
## Automate testing and documentation
-Generate unit-tests, integration tests, and automated tests with ease, ensuring your code is robust and reliable. Alita Code also automatically adds comments to your code, making it more understandable and maintainable for your team.
+Generate unit-tests, integration tests, and automated tests with ease, ensuring your code is robust and reliable. ELITEA Code also automatically adds comments to your code, making it more understandable and maintainable for your team.
## Customizable prompts for personalized assistance
-Tailor Alita Code to your specific needs with customizable internal and external prompts. Create and modify prompts within your IDE, or leverage the power of Alita Backend's large language model for external prompts, offering an unparalleled level of adaptability.
+Tailor ELITEA Code to your specific needs with customizable internal and external prompts. Create and modify prompts within your IDE, or leverage the power of ELITEA Backend's large language model for external prompts, offering an unparalleled level of adaptability.
# Features list:
@@ -26,7 +26,7 @@ Tailor Alita Code to your specific needs with customizable internal and external
- Automated test creation
- Automatic code commenting
- Customizable internal prompts
-- Project-specific external prompts powered by Alita Backend
+- Project-specific external prompts powered by ELITEA Backend
- Code explanation and optimization recommendations
- Native IDE integration
- Regular updates and improvements
@@ -35,36 +35,27 @@ Tailor Alita Code to your specific needs with customizable internal and external
- Secure and privacy-conscious implementation
# Extension Commands
-- Alita: Init - Initialize Alita Code and create .promptLib folder in a root of your open workspace
-- Alita: Create Prompt - Create a new prompt in .promptLib folder
-- Alita: Extend Context - Extend context of the prompt in .promptLib folder
-- Alita: Predict - Provide a list of prompts to choose from and generate prediction based on the selected prompt and **its last version**
-- Alita: Similarity - Provide list of embedding to run similarity search against
-- Alita: Sync External Prompts - Sync external prompts from Alita Backend
-
-
-# Supported LLM providers
-- Alita - https://projectalita.ai
-- EPAM Dial - https://epam-rail.com
-- OpenAI - https://openai.com
-- Azure Open AI - https://azure.microsoft.com/en-us/products/ai-services/openai-service
-
+- Elitea: Init - Initialize ELITEA Code and create the .promptLib folder in the root of your open workspace
+- Elitea: Create Prompt - Create a new prompt in .promptLib folder
+- Elitea: Extend Context - Extend context of the prompt in .promptLib folder
+- Elitea: Predict - Provide a list of prompts to choose from and generate prediction based on the selected prompt and **its last version**
+- Elitea: Sync External Prompts - Sync external prompts from ELITEA Backend
# Extension Settings
This extension contributes the following settings:
-- alitacode.enable: enable/disable this extension
-- alitacode.serviceProviderForLLM: select the LLM provider (Alita, OpenAI, Azure Open AI)
-- alitacode.LLMAuthToken: API key for the selected LLM provider
-- alitacode.LLMServerUrl: URL of the LLM provider server
-- alitacode.apiVersion: Api version, mostly applicable for Azure OpenAI compatible APIs
-- alitacode.LLMModelName: Default model name used for local prompts (Can be overwritten in prompt)
-- alitacode.projectId (optional): Project ID for external prompts (ignored for any OpenAI)
-- alitacode.integrationUid (optional): Integration UID for external prompts (ignored for any OpenAI)
-- alitacode.temperature: Default temperature for model (Can be overwritten in prompt)
-- alitacode.maxTokens: Default max tokens for model (Can be overwritten in prompt)
-- alitacode.topP: Default top P for model (Can be overwritten in prompt)
-- alitacode.topK: Default top K for model (Can be overwritten in prompt)
+- eliteacode.enable: enable/disable this extension
+- eliteacode.serviceProviderForLLM: select the LLM provider (ELITEA)
+- eliteacode.LLMAuthToken: API key for the selected LLM provider
+- eliteacode.LLMServerUrl: URL of the LLM provider server
+- eliteacode.apiVersion: API version, mostly applicable to Azure OpenAI compatible APIs
+- eliteacode.LLMModelName: Default model name used for local prompts (Can be overwritten in prompt)
+- eliteacode.projectId (optional): Project ID for external prompts (ignored for any OpenAI)
+- eliteacode.integrationUid (optional): Integration UID for external prompts (ignored for any OpenAI)
+- eliteacode.temperature: Default temperature for model (Can be overwritten in prompt)
+- eliteacode.maxTokens: Default max tokens for model (Can be overwritten in prompt)
+- eliteacode.topP: Default top P for model (Can be overwritten in prompt)
+- eliteacode.topK: Default top K for model (Can be overwritten in prompt)
# Development
diff --git a/commands/getAvailableAIModels.js b/commands/getAvailableAIModels.js
index 52f63e1..5180313 100644
--- a/commands/getAvailableAIModels.js
+++ b/commands/getAvailableAIModels.js
@@ -21,22 +21,22 @@ module.exports = async function () {
if (selectedModel) {
const configuration = vscode.workspace.getConfiguration();
const { label: modelName, description: groupName } = selectedModel;
- await configuration.update("alitacode.LLMModelName", modelName, vscode.ConfigurationTarget.Workspace);
- await configuration.update("alitacode.LLMModelName", modelName, vscode.ConfigurationTarget.Global);
+ await configuration.update("eliteacode.LLMModelName", modelName, vscode.ConfigurationTarget.Workspace);
+ await configuration.update("eliteacode.LLMModelName", modelName, vscode.ConfigurationTarget.Global);
const integrationName = await alitaService.getAIModelIntegrationName(groupName, true);
await configuration.update(
- "alitacode.integrationName",
+ "eliteacode.integrationName",
integrationName.toString(),
vscode.ConfigurationTarget.Workspace
);
await configuration.update(
- "alitacode.integrationName",
+ "eliteacode.integrationName",
integrationName.toString(),
vscode.ConfigurationTarget.Global
);
const uid = await alitaService.getAIModelUid(groupName, true);
- await configuration.update("alitacode.integrationUid", uid.toString(), vscode.ConfigurationTarget.Workspace);
- await configuration.update("alitacode.integrationUid", uid.toString(), vscode.ConfigurationTarget.Global);
+ await configuration.update("eliteacode.integrationUid", uid.toString(), vscode.ConfigurationTarget.Workspace);
+ await configuration.update("eliteacode.integrationUid", uid.toString(), vscode.ConfigurationTarget.Global);
vscode.window.showInformationMessage(`You selected: ${modelName} [${groupName}]`);
} else {
vscode.window.showInformationMessage("Operation cancelled.");
diff --git a/commands/initAlita.js b/commands/initAlita.js
index 4d5c030..7fdfefb 100644
--- a/commands/initAlita.js
+++ b/commands/initAlita.js
@@ -42,6 +42,6 @@ module.exports = async function () {
}
fs.writeFileSync(path.join(workspacePath, promptLib, "./prompts.json"), JSON.stringify(prompts, null, 2));
}
- await vscode.commands.executeCommand("setContext", "alita.init", true);
+ await vscode.commands.executeCommand("setContext", "eliteacode.init", true);
return await onConfigChange();
};
diff --git a/commands/onConfigChange.js b/commands/onConfigChange.js
index 451383e..c96159c 100644
--- a/commands/onConfigChange.js
+++ b/commands/onConfigChange.js
@@ -76,17 +76,17 @@ module.exports = async function () {
const { promptLib, workspacePath, LLMProvider, verifySsl, LLMauthToken } = workspaceService.getWorkspaceConfig();
verifyToken(parseJwt(LLMauthToken));
https.globalAgent.options.rejectUnauthorized = verifySsl;
- await vscode.commands.executeCommand("setContext", "alitacode.LLMProvider", LLMProvider);
+ await vscode.commands.executeCommand("setContext", "eliteacode.LLMProvider", LLMProvider);
await vscode.commands.executeCommand(
"setContext",
- "alitacode.LocalPrompts",
+ "eliteacode.LocalPrompts",
!LOCAL_PROMPTS_BLOCKERS.includes(LLMProvider)
);
alitaService.serviceProvider = undefined;
if (promptLib && fs.existsSync(path.join(workspacePath, promptLib, "./prompts.json"))) {
- await vscode.commands.executeCommand("setContext", "alita.init", true);
+ await vscode.commands.executeCommand("setContext", "eliteacode.init", true);
return await workspaceService.updatePrompts();
} else {
- return await vscode.commands.executeCommand("setContext", "alita.init", false);
+ return await vscode.commands.executeCommand("setContext", "eliteacode.init", false);
}
};
diff --git a/commands/predict.js b/commands/predict.js
index fd7eb35..7d76393 100644
--- a/commands/predict.js
+++ b/commands/predict.js
@@ -33,7 +33,6 @@ module.exports = async function () {
const promptsList = await workspaceService.updatePrompts();
// renderring list
const entities = [...promptsList]
- .filter((it) => !it.external)
.map((prompt) => ({
label: prompt.label.replace(/(_prompt|_datasource)$/, ""),
description: prompt.description,
@@ -59,7 +58,7 @@ module.exports = async function () {
vscode.window.withProgress(
{
location: vscode.ProgressLocation.Window,
- title: "Alita prediction...",
+ title: "Elitea prediction...",
cancellable: false,
},
(progress) => {
diff --git a/constants/index.js b/constants/index.js
index a4054c8..52869d8 100644
--- a/constants/index.js
+++ b/constants/index.js
@@ -32,7 +32,7 @@ module.exports = {
},
WORKSPACE: {
EXTENSION: {
- NAME: "alitacode",
+ NAME: "eliteacode",
PARAM: {
ENABLE: "enable",
PROMPT_LIB: "promptLib",
@@ -55,18 +55,18 @@ module.exports = {
},
},
COMMAND: {
- INIT_ALITA: "alitacode.initAlita",
- SYNC_PROMPTS: "alitacode.syncPrompts",
- ADD_EXAMPLE: "alitacode.addExample",
- ADD_CONTEXT: "alitacode.addContext",
- CREATE_PROMPT: "alitacode.createPrompt",
- PREDICT: "alitacode.predict",
+ INIT_ALITA: "eliteacode.initAlita",
+ SYNC_PROMPTS: "eliteacode.syncPrompts",
+ ADD_EXAMPLE: "eliteacode.addExample",
+ ADD_CONTEXT: "eliteacode.addContext",
+ CREATE_PROMPT: "eliteacode.createPrompt",
+ PREDICT: "eliteacode.predict",
OPEN_SETTINGS: "workbench.action.openSettings",
- ADD_GOOD_PREDICTION: "alitacode.addGoodPrediction",
- GET_AVAILABLE_AI_MODELS: "alitacode.getAvailableAIModels",
+ ADD_GOOD_PREDICTION: "eliteacode.addGoodPrediction",
+ GET_AVAILABLE_AI_MODELS: "eliteacode.getAvailableAIModels",
},
TEXT: {
- ALITA_ACTIVATED: "Alita was activated! Please specify configuration",
+ ALITA_ACTIVATED: "Elitea was activated! Please specify configuration",
ENTER_PROMPT_NAME: "Enter prompt name",
ENTER_PROMPT_DESCRIPTION: "Enter prompt description",
ENTER_PROMPT_CONTEXT: "Enter context",
@@ -77,6 +77,6 @@ module.exports = {
MESSAGE: {
CONTEXT_WAS_ADDED: (label) => `Context was added to ${label} prompt!`,
},
- EXTERNAL_PROMPTS_PROVIDERS: ["Alita", "DigitalPlatform"],
+ EXTERNAL_PROMPTS_PROVIDERS: ["ELITEA", "DigitalPlatform"],
LOCAL_PROMPTS_BLOCKERS: ["DigitalPlatform"],
};
diff --git a/extension.js b/extension.js
index d7a8ef6..2aee5b2 100644
--- a/extension.js
+++ b/extension.js
@@ -18,7 +18,6 @@ const { COMMAND, EXTERNAL_PROMPTS_PROVIDERS } = require("./constants/index");
const {
addContext,
addExample,
- createPrompt,
predict,
addGoodPrediction,
initAlita,
@@ -30,7 +29,7 @@ import { CreatePromptPanel } from "./panels/CreatePromptPanel";
const OutputService = require("./services/output.service");
async function activate(context) {
- await vscode.commands.executeCommand("setContext", "alitacode.ExtentablePlatforms", EXTERNAL_PROMPTS_PROVIDERS);
+ await vscode.commands.executeCommand("setContext", "eliteacode.ExtentablePlatforms", EXTERNAL_PROMPTS_PROVIDERS);
try {
await onConfigChange();
} catch (error) {
diff --git a/package-lock.json b/package-lock.json
index cc20fa5..378034f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -22,7 +22,7 @@
"@types/vscode": "^1.78.0",
"@vscode/test-electron": "^2.3.2",
"@vscode/vsce": "^2.25.0",
- "esbuild": "^0.19.5",
+ "esbuild": "^0.19.12",
"eslint": "^8.41.0",
"eslint-config-prettier": "^8.8.0",
"glob": "^8.1.0",
@@ -1794,6 +1794,7 @@
"integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==",
"dev": true,
"hasInstallScript": true,
+ "license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
diff --git a/package.json b/package.json
index 9794709..ba3c32b 100644
--- a/package.json
+++ b/package.json
@@ -1,8 +1,8 @@
{
"name": "alitacode",
- "displayName": "Alita Code",
+ "displayName": "ELITEA Code",
"description": "Your Virtual Coding Assistant, to help you write better code faster",
- "version": "0.4.0",
+ "version": "0.4.1",
"icon": "images/128x128.png",
"publisher": "ProjectAlita",
"engines": {
@@ -20,70 +20,70 @@
"contributes": {
"commands": [
{
- "command": "alitacode.predict",
- "title": "Alita: Predict"
+ "command": "eliteacode.predict",
+ "title": "Predict"
},
{
- "command": "alitacode.createPrompt",
- "title": "Alita: Create Prompt"
+ "command": "eliteacode.createPrompt",
+ "title": "Create Prompt"
},
{
- "command": "alitacode.addContext",
- "title": "Alita: Extend Context"
+ "command": "eliteacode.addContext",
+ "title": "Extend Context"
},
{
- "command": "alitacode.syncPrompts",
- "title": "Alita: Sync External Prompts"
+ "command": "eliteacode.syncPrompts",
+ "title": "Sync External Prompts"
},
{
- "command": "alitacode.initAlita",
- "title": "Alita: Init"
+ "command": "eliteacode.initAlita",
+ "title": "Init"
},
{
- "command": "alitacode.getAvailableAIModels",
- "title": "Alita: Get available AI models from the server"
+ "command": "eliteacode.getAvailableAIModels",
+ "title": "Get available AI models from the server"
}
],
"menus": {
"editor/context": [
{
"when": "editorFocus && !editorReadonly",
- "submenu": "alitacode.submenu",
- "group": "Alita"
+ "submenu": "eliteacode.submenu",
+ "group": "Elitea"
}
],
- "alitacode.submenu": [
+ "eliteacode.submenu": [
{
- "when": "editorHasSelection && editorFocus && !editorReadonly && alita.init",
- "command": "alitacode.predict"
+ "when": "editorHasSelection && editorFocus && !editorReadonly && eliteacode.init",
+ "command": "eliteacode.predict"
},
{
- "when": "editorFocus && !editorReadonly && alita.init && alitacode.LocalPrompts",
- "command": "alitacode.createPrompt"
+ "when": "editorFocus && !editorReadonly && eliteacode.init && eliteacode.LocalPrompts",
+ "command": "eliteacode.createPrompt"
},
{
- "when": "editorFocus && !alita.init && alitacode.LLMProvider != 'None'",
- "command": "alitacode.initAlita"
+ "when": "editorFocus && !eliteacode.init && eliteacode.LLMProvider != 'None'",
+ "command": "eliteacode.initAlita"
},
{
- "when": "editorHasSelection && editorFocus && !editorReadonly && alita.init && alitacode.LocalPrompts",
- "command": "alitacode.addContext"
+ "when": "editorHasSelection && editorFocus && !editorReadonly && eliteacode.init && eliteacode.LocalPrompts",
+ "command": "eliteacode.addContext"
},
{
- "when": "alita.init && alitacode.LLMProvider in alitacode.ExtentablePlatforms",
- "command": "alitacode.syncPrompts"
+ "when": "eliteacode.init && eliteacode.LLMProvider in eliteacode.ExtentablePlatforms",
+ "command": "eliteacode.syncPrompts"
}
]
},
"submenus": [
{
- "id": "alitacode.submenu",
- "label": "Alita"
+ "id": "eliteacode.submenu",
+ "label": "Elitea"
}
],
"keybindings": [
{
- "command": "alitacode.predict",
+ "command": "eliteacode.predict",
"key": "ctrl+shift+r"
}
],
@@ -91,26 +91,26 @@
{
"title": "Main Settings",
"properties": {
- "alitacode.LLMServerUrl": {
+ "eliteacode.LLMServerUrl": {
"order": 1,
"type": "string",
"default": "",
"description": "URL to LLM service provider"
},
- "alitacode.LLMAuthToken": {
+ "eliteacode.LLMAuthToken": {
"order": 2,
"type": "string",
"default": "",
"pattern": "(^[A-Za-z0-9-_]+\\.[A-Za-z0-9-_]+\\.[A-Za-z0-9-_]{86}$)",
"patternErrorMessage": "Token has incorrect format"
},
- "alitacode.projectId": {
+ "eliteacode.projectId": {
"order": 3,
"type": "number",
"default": 1,
"description": "Project Id in Carrier, ignored for OpenAI and AI Dial"
},
- "alitacode.displayType": {
+ "eliteacode.displayType": {
"order": 4,
"type": "string",
"default": "append",
@@ -122,20 +122,20 @@
],
"description": "Select the default display mode for the predictions"
},
- "alitacode.verifySsl": {
+ "eliteacode.verifySsl": {
"order": 5,
"label": "Verify SSL",
"type": "boolean",
"default": true,
"description": "Verify LLM service provider certificate"
},
- "alitacode.enable": {
+ "eliteacode.enable": {
"order": 6,
"type": "boolean",
"default": true,
"description": "Enable/disable this extension"
},
- "alitacode.debug": {
+ "eliteacode.debug": {
"order": 10,
"type": "boolean",
"default": false,
@@ -146,26 +146,26 @@
{
"title": "Integration Settings",
"properties": {
- "alitacode.selectIntegration": {
+ "eliteacode.selectIntegration": {
"order": 1,
"type": "string",
"default": "Please use [Click here to select] button",
"enum": [
"Please use [Click here to select] button"
],
- "markdownDescription": "[Click here to select](command:alitacode.getAvailableAIModels)"
+ "markdownDescription": "[Click here to select](command:eliteacode.getAvailableAIModels)"
},
- "alitacode.LLMModelName": {
+ "eliteacode.LLMModelName": {
"order": 2,
"type": "string",
"default": "",
"markdownDescription": "Model name used for local prompts, this is deployment name, and it can be different from case to case"
},
- "alitacode.integrationName": {
+ "eliteacode.integrationName": {
"order": 3,
"type": "string"
},
- "alitacode.integrationUid": {
+ "eliteacode.integrationUid": {
"order": 4,
"type": "string",
"default": "Integration UUID goes here",
@@ -176,13 +176,13 @@
{
"title": "Advanced Settings",
"properties": {
- "alitacode.customModelSize": {
+ "eliteacode.customModelSize": {
"order": 1,
"type": "number",
"default": 4096,
"description": "Type custom model's tokens size"
},
- "alitacode.maxTokens": {
+ "eliteacode.maxTokens": {
"order": 2,
"label": "Max Tokens default value",
"type": "number",
@@ -191,7 +191,7 @@
"maximum": 8096,
"description": "Max tokens value for the model"
},
- "alitacode.temperature": {
+ "eliteacode.temperature": {
"order": 3,
"label": "Temperature default value",
"type": "number",
@@ -200,7 +200,7 @@
"maximum": 1,
"description": "Temperature value for the model"
},
- "alitacode.topK": {
+ "eliteacode.topK": {
"order": 4,
"label": "Top K default value",
"type": "number",
@@ -209,7 +209,7 @@
"maximum": 40,
"description": "Top K value for the model"
},
- "alitacode.topP": {
+ "eliteacode.topP": {
"order": 5,
"label": "Top P default value",
"type": "number",
@@ -251,7 +251,7 @@
"@types/vscode": "^1.78.0",
"@vscode/test-electron": "^2.3.2",
"@vscode/vsce": "^2.25.0",
- "esbuild": "^0.19.5",
+ "esbuild": "^0.19.12",
"eslint": "^8.41.0",
"eslint-config-prettier": "^8.8.0",
"glob": "^8.1.0",
diff --git a/panels/CreatePromptPanel.html b/panels/CreatePromptPanel.html
index bc1f443..2b8d298 100644
--- a/panels/CreatePromptPanel.html
+++ b/panels/CreatePromptPanel.html
@@ -1,39 +1,81 @@
-
-
-
-
-
-
-
-
-
-
-
-
-