From b96732d00f97f4819fbd1cd4e9da8323db42cc4e Mon Sep 17 00:00:00 2001
From: Rick Lamers
Date: Sun, 21 May 2023 08:43:59 +0200
Subject: [PATCH] Proxy kernel manager and make ports configurable.

---
 README.md                          |  5 +++++
 frontend/src/config.tsx            |  6 ++++--
 gpt_code_ui/kernel_program/main.py |  2 +-
 gpt_code_ui/webapp/main.py         | 33 +++++++++++++++++++++++++++---
 setup.py                           |  2 +-
 5 files changed, 41 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 97118aa1..7e2021ed 100644
--- a/README.md
+++ b/README.md
@@ -30,5 +30,10 @@ $ gptcode
 ### Using .env for OpenAI key
 You can put a .env in the working directory to load the `OPENAI_API_KEY` environment variable.
 
+### Configurables
+Set the `API_PORT` and `WEB_PORT` variables to override the defaults.
+
+Set `OPENAI_BASE_URL` to change the OpenAI API endpoint that's being used (note this environment variable includes the protocol `https://...`).
+
 ## Contributing
 Please do and have a look at the [contributions guide](.github/CONTRIBUTING.md)! This should be a community initiative. I'll try my best to be responsive.
diff --git a/frontend/src/config.tsx b/frontend/src/config.tsx
index 7581232f..955939f8 100644
--- a/frontend/src/config.tsx
+++ b/frontend/src/config.tsx
@@ -1,6 +1,8 @@
+let resolvedWebAddress = import.meta.env.VITE_WEB_ADDRESS ? import.meta.env.VITE_WEB_ADDRESS : "";
+
 const Config = {
-    WEB_ADDRESS: "http://localhost:8080",
-    API_ADDRESS: "http://localhost:5010"
+    WEB_ADDRESS: resolvedWebAddress,
+    API_ADDRESS: resolvedWebAddress + "/api"
 }
 
 export default Config;
\ No newline at end of file
diff --git a/gpt_code_ui/kernel_program/main.py b/gpt_code_ui/kernel_program/main.py
index 8dac89cf..894deba6 100644
--- a/gpt_code_ui/kernel_program/main.py
+++ b/gpt_code_ui/kernel_program/main.py
@@ -20,7 +20,7 @@
 import gpt_code_ui.kernel_program.config as config
 import gpt_code_ui.kernel_program.utils as utils
 
-APP_PORT = 5010
+APP_PORT = int(os.environ.get("API_PORT", 5010))
 
 # Get global logger
 logger = config.get_logger()
diff --git a/gpt_code_ui/webapp/main.py b/gpt_code_ui/webapp/main.py
index ee03bd75..fb526794 100644
--- a/gpt_code_ui/webapp/main.py
+++ b/gpt_code_ui/webapp/main.py
@@ -10,9 +10,11 @@
 from collections import deque
 
 from flask_cors import CORS
-from flask import Flask, request, jsonify, send_from_directory
+from flask import Flask, request, jsonify, send_from_directory, Response
 from dotenv import load_dotenv
 
+from gpt_code_ui.kernel_program.main import APP_PORT as KERNEL_APP_PORT
+
 load_dotenv('.env')
 
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
@@ -21,7 +23,9 @@
 UPLOAD_FOLDER = 'workspace/'
 os.makedirs(UPLOAD_FOLDER, exist_ok=True)
 
-APP_PORT = 8080
+
+APP_PORT = int(os.environ.get("WEB_PORT", 8080))
+
 
 class LimitedLengthString:
     def __init__(self, maxlen=2000):
@@ -43,6 +47,7 @@ def get_string(self):
 
 message_buffer = LimitedLengthString()
 
+
 def allowed_file(filename):
     return True
 
@@ -70,7 +75,7 @@ async def get_code(user_prompt, user_openai_key=None, model="gpt-3.5-turbo"):
         "Content-Type": "application/json",
         "Authorization": f"Bearer {final_openai_key}",
     }
-    
+
     response = requests.post(
         f"{OPENAI_BASE_URL}/v1/chat/completions",
         data=json.dumps(data),
@@ -110,9 +115,31 @@ def extract_code(text):
 
 @app.route('/')
 def index():
+
+    # Check if index.html exists in the static folder
+    if not os.path.exists(os.path.join(app.root_path, 'static/index.html')):
+        print("index.html not found in static folder. Exiting. Did you forget to run `make compile_frontend` before installing the local package?")
+
     return send_from_directory('static', 'index.html')
 
 
+@app.route('/api/<path:path>', methods=["GET", "POST"])
+def proxy_kernel_manager(path):
+    if request.method == "POST":
+        resp = requests.post(
+            f'http://localhost:{KERNEL_APP_PORT}/{path}', json=request.get_json())
+    else:
+        resp = requests.get(f'http://localhost:{KERNEL_APP_PORT}/{path}')
+
+    excluded_headers = ['content-encoding',
+                        'content-length', 'transfer-encoding', 'connection']
+    headers = [(name, value) for (name, value) in resp.raw.headers.items()
+               if name.lower() not in excluded_headers]
+
+    response = Response(resp.content, resp.status_code, headers)
+    return response
+
+
 @app.route('/assets/<path:path>')
 def serve_static(path):
     return send_from_directory('static/assets/', path)
diff --git a/setup.py b/setup.py
index c2de7397..5f87c408 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@
 
 setup(
     name='gpt_code_ui',
-    version='0.42.17',
+    version='0.42.18',
     description="An Open Source version of ChatGPT Code Interpreter",
     long_description=long_description,
     long_description_content_type='text/markdown', # This field specifies the format of the `long_description`.