Commit
Merge pull request #31 from Aincvy/output-channel-comfortable-completion
Showing 11 changed files with 862 additions and 138 deletions.
@@ -0,0 +1,73 @@
import { fauxpilotClient, RequestType } from "./FauxpilotClient";

// import * as http from 'http'
// import * as https from 'https'
import axios from "axios";
import { AxiosInstance } from "axios";
import OpenAI from 'openai';

const http = require('http');
const https = require('https');

// Strangely, this does not work: the server still receives requests with `Connection: close`,
// even though a plain Node script using the same agents produces `Connection: keep-alive`.
// For now, neither the OpenAI client nor axios manages to achieve keep-alive.
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });

class AccessBackendCache {
    private openai: OpenAI;
    private axiosInstance: AxiosInstance;

    constructor() {
        this.openai = new OpenAI({ apiKey: fauxpilotClient.Token, baseURL: fauxpilotClient.BaseUrl });
        this.axiosInstance = axios.create({
            httpAgent,
            httpsAgent,
            baseURL: fauxpilotClient.BaseUrl,
            timeout: 20000,
        });
    }

    public fetchUseOpenAI(data: any): Promise<OpenAI.Completion> {
        return this.openai.completions.create(data);
    }

    public fetchUseAxios(data: any): Promise<OpenAI.Completion> {
        return this.axiosInstance.post('/completions', data).then(response => response.data);
    }
}

let cacheInScript: AccessBackendCache;

export function rebuildAccessBackendCache() {
    cacheInScript = new AccessBackendCache();
}

function getCache(): AccessBackendCache {
    if (!cacheInScript) {
        console.log("rebuilding access backend cache");
        rebuildAccessBackendCache();
    }
    return cacheInScript;
}

export function fetch(prompt: string): Promise<OpenAI.Completion> {
    const data = {
        model: fauxpilotClient.Model,
        prompt: prompt,
        max_tokens: fauxpilotClient.MaxTokens,
        temperature: fauxpilotClient.Temperature,
        stop: fauxpilotClient.StopWords
    };

    if (fauxpilotClient.RequestType === RequestType.OpenAI) {
        return getCache().fetchUseOpenAI(data);
    } else {
        return getCache().fetchUseAxios(data);
    }
}
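For reference, a caller would consume the returned `OpenAI.Completion` roughly as in the sketch below. This is an assumption for illustration only; `getSuggestions` is a made-up helper, and the actual inline-completion provider lives elsewhere in the extension and is not shown in this excerpt.

// Hypothetical caller (assumed, not part of this commit): turn the backend
// response into plain suggestion strings for an inline completion provider.
import { fetch } from "./AccessBackend";

async function getSuggestions(prompt: string): Promise<string[]> {
    const completion = await fetch(prompt);
    return completion.choices
        .map(choice => choice.text)
        .filter(text => text.trim().length > 0);
}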
@@ -1,2 +1,2 @@
 // Proportion of lines from the beginning of the file in the prompt
-export const LEADING_LINES_PROP = 0.15;
+export const LEADING_LINES_PROP = 0.21;
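LEADING_LINES_PROP is the share of the prompt reserved for lines from the top of the file, raised here from 0.15 to 0.21. The prompt-assembly code that consumes it is not part of this excerpt, so the following is only a sketch of how such a proportion could be applied; `MAX_PROMPT_LINES`, `buildPrompt`, and the import path are assumed names, not taken from the diff.

// Illustrative only - the extension's real prompt builder is not shown in this diff.
import { LEADING_LINES_PROP } from "./Constants"; // import path assumed

const MAX_PROMPT_LINES = 150; // assumed budget, matching the maxLines default used below

function buildPrompt(fileLines: string[], cursorLine: number): string {
    // Reserve a share of the budget for lines from the top of the file...
    const leadingCount = Math.floor(MAX_PROMPT_LINES * LEADING_LINES_PROP);
    const leading = fileLines.slice(0, leadingCount);
    // ...and spend the rest on the lines immediately above the cursor.
    const remaining = MAX_PROMPT_LINES - leading.length;
    const start = Math.max(leading.length, cursorLine - remaining);
    const preceding = fileLines.slice(start, cursorLine);
    return [...leading, ...preceding].join("\n");
}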
@@ -0,0 +1,153 @@
import { WorkspaceConfiguration, OutputChannel, ConfigurationTarget } from "vscode";
import { currentTimeString } from "./Utils";
import { rebuildAccessBackendCache } from "./AccessBackend";

export enum RequestType {
    OpenAI,
    Aixos
}

export class FauxpilotClient {
    private outputChannel?: OutputChannel;
    private extConfig?: WorkspaceConfiguration;
    private enabled = false;
    private suggestionDelay = 0;
    private excludeFileExts: Array<String>;
    private baseUrl: string;
    private model: string;
    private maxTokens: number;
    private temperature: number;
    private stopWords: string[];
    private token: string;
    private requestType = RequestType.OpenAI;
    private maxLines: number;

    public version: string;

    constructor() {
        // this.outputChannel = null;
        this.excludeFileExts = [];
        this.baseUrl = '';
        this.model = '<<UNSET>>';
        this.maxTokens = 80;
        this.temperature = 0.5;
        this.stopWords = [];
        this.version = '';
        this.token = '';
        this.maxLines = 150;
    }

    public init(extConfig: WorkspaceConfiguration, channel: OutputChannel) {
        this.extConfig = extConfig;
        this.outputChannel = channel;
        this.reload(extConfig);
    }

    public reload(extConfig: WorkspaceConfiguration) {
        this.extConfig = extConfig;
        this.enabled = extConfig.get<boolean>("enabled", false) ?? false;
        this.suggestionDelay = extConfig.get("suggestionDelay", 0) ?? 0;
        this.baseUrl = `${extConfig.get("server")}/${extConfig.get("engine")}`;

        this.excludeFileExts = [];
        // let excludeFileExtsConfig = extConfig.get("excludeFileExts", new Map<String, Boolean>());
        let excludeFileExtsConfig: { [key: string]: boolean } = extConfig.get("excludeFileExts", {});
        for (const key in excludeFileExtsConfig as object) {
            if (excludeFileExtsConfig[key]) {
                this.excludeFileExts.push(key);
            }
        }

        this.model = extConfig.get("model") ?? "<<UNSET>>";
        this.maxTokens = extConfig.get("maxTokens", 80);
        this.temperature = extConfig.get("temperature", 0.5);
        this.stopWords = extConfig.get("inlineCompletion") ? ["\n"] : [];
        this.token = extConfig.get("token", '');
        this.requestType = extConfig.get("requestType", 'openai') === 'openai' ? RequestType.OpenAI : RequestType.Aixos;
        this.maxLines = extConfig.get("maxLines", 150);

        this.log(`enabled = ${this.enabled}`);
        this.log(`baseUrl = ${this.baseUrl}`);
        this.log(`suggestionDelay = ${this.suggestionDelay}`);
        this.log(`excludeFileExts = ${this.excludeFileExts}`);
        this.log(`model = ${this.model}`);
        this.log(`maxTokens = ${this.maxTokens}`);
        this.log(`temperature = ${this.temperature}`);
        this.log(`stopWords = ${this.stopWords}`);
        this.log(`token = ${this.token}`);
        this.log(`requestType = ${this.requestType}`);
        this.log(`maxLines = ${this.maxLines}`);

        rebuildAccessBackendCache();
        this.log("reload config finish.");
    }

    public log(str: string) {
        if (!this.outputChannel) {
            console.log('[Error] outputChannel is undefined!');
            return;
        }
        this.outputChannel?.appendLine(`${currentTimeString()} ${str}`);
    }

    public get isEnabled(): boolean {
        return this.enabled;
    }

    public set isEnabled(value: boolean) {
        if (this.isEnabled !== value) {
            this.enabled = value;
            this.extConfig?.update("enabled", value);
            this.outputChannel?.appendLine("change status to: " + this.enabled);
        }
    }

    public get OutputChannel(): OutputChannel | undefined {
        return this.outputChannel;
    }

    public get SuggestionDelay(): number {
        return this.suggestionDelay;
    }

    public get BaseUrl(): string {
        return this.baseUrl;
    }

    public get ExcludeFileExts(): Array<String> {
        return this.excludeFileExts;
    }

    public get Model(): string {
        return this.model;
    }

    public get MaxTokens(): number {
        return this.maxTokens;
    }

    public get MaxLines(): number {
        return this.maxLines;
    }

    public get Temperature(): number {
        return this.temperature;
    }

    public get StopWords(): Array<string> {
        return this.stopWords;
    }

    public get Token(): string {
        return this.token;
    }

    public get RequestType(): RequestType {
        return this.requestType;
    }
}

const client = new FauxpilotClient();

export const fauxpilotClient = client;
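The extension's activation code is not part of this excerpt. As a rough sketch of how a singleton like this is typically wired up in a VS Code extension, something along the following lines would call `init` and later `reload`; the output channel name and the `fauxpilot` configuration section are assumed, not taken from the diff.

// Hypothetical activation wiring (assumed, not part of this commit).
import * as vscode from 'vscode';
import { fauxpilotClient } from './FauxpilotClient';

export function activate(context: vscode.ExtensionContext) {
    const channel = vscode.window.createOutputChannel('Fauxpilot'); // assumed channel name
    fauxpilotClient.init(vscode.workspace.getConfiguration('fauxpilot'), channel);

    // Reload settings whenever the user edits them, which also rebuilds the backend cache.
    context.subscriptions.push(
        vscode.workspace.onDidChangeConfiguration(e => {
            if (e.affectsConfiguration('fauxpilot')) {
                fauxpilotClient.reload(vscode.workspace.getConfiguration('fauxpilot'));
            }
        })
    );
}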