Skip to content

Commit

Permalink
OpenRouter via LiteLLM
Browse files Browse the repository at this point in the history
  • Loading branch information
joshpxyne committed Oct 25, 2023
1 parent af2dab8 commit 9f89422
Show file tree
Hide file tree
Showing 5 changed files with 37 additions and 63 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ poetry install

This will create a virtual environment and install all the necessary dependencies in that environment.

2. Set your [OpenRouter API key](https://openrouter.ai/docs#api-keys) and install the python requirements:
2. Set your [OpenAI API key](https://platform.openai.com/account/api-keys) and install the python requirements:

`export OPENROUTER_API_KEY=<your key>`
`export OPENAI_API_KEY=<your key>`

`pip install -r requirements.txt`

Expand Down
82 changes: 29 additions & 53 deletions gpt_migrate/ai.py
Original file line number Diff line number Diff line change
@@ -1,41 +1,32 @@
from langchain.chat_models import ChatOpenAI
from config import OPENAI_API_KEY
import os
import openai
from utils import parse_code_string
from litellm import completion

openai.api_base = "https://openrouter.ai/api/v1"
openai.api_key = os.getenv("OPENROUTER_API_KEY")
openai.api_key = os.getenv("OPENAI_API_KEY")

class AI:
    """Thin wrapper around a litellm-routed chat model.

    Model names use litellm's provider-prefixed form, e.g.
    'openrouter/openai/gpt-4-32k' to route through OpenRouter, or a bare
    model name (e.g. 'gpt-4-32k') to call the provider directly.
    """

    def __init__(self, model='openrouter/openai/gpt-4-32k', temperature=0.1, max_tokens=10000):
        # Sampling temperature forwarded to every completion call.
        self.temperature = temperature
        # Upper bound on tokens generated per completion call.
        self.max_tokens = max_tokens
        # litellm model identifier; the provider prefix selects the route.
        self.model_name = model

def write_code(self, prompt):
message=[{"role": "user", "content": str(prompt)}]
if self.modelrouter == "openrouter":
response = openai.ChatCompletion.create(
model="{}/{}".format(self.model_provider,self.model_name), # Optional (user controls the default)
messages=message,
stream=False,
max_tokens=self.max_tokens,
temperature=self.temperature,
headers={
"HTTP-Referer": "https://gpt-migrate.com",
"X-Title": "GPT-Migrate",
},
)
else:
response = completion(
messages=message,
stream=False,
model=self.model_name,
max_tokens=self.max_tokens,
temperature=self.temperature
)
response = completion(
messages=message,
stream=False,
model=self.model_name,
max_tokens=self.max_tokens,
temperature=self.temperature
)
if response["choices"][0]["message"]["content"].startswith("INSTRUCTIONS:"):
return ("INSTRUCTIONS:","",response["choices"][0]["message"]["content"][14:])
else:
Expand All @@ -44,32 +35,17 @@ def write_code(self, prompt):

def run(self, prompt):
message=[{"role": "user", "content": str(prompt)}]
if self.modelrouter == "openrouter":
response = openai.ChatCompletion.create(
model="{}/{}".format(self.model_provider,self.model_name), # Optional (user controls the default)
messages=message,
stream=False,
max_tokens=self.max_tokens,
temperature=self.temperature,
headers={
"HTTP-Referer": "https://gpt-migrate.com",
"X-Title": "GPT-Migrate",
},
)
return response["choices"][0]["message"]["content"]
else:
response = completion(
messages=message,
stream=True,
model=self.model_name,
max_tokens=self.max_tokens,
temperature=self.temperature
)
chat = ""
for chunk in response:
delta = chunk["choices"][0]["delta"]
msg = delta.get("content", "")
chat += msg
return chat

response = completion(
messages=message,
stream=True,
model=self.model_name,
max_tokens=self.max_tokens,
temperature=self.temperature
)
chat = ""
for chunk in response:
delta = chunk["choices"][0]["delta"]
msg = delta.get("content", "")
chat += msg
return chat

6 changes: 1 addition & 5 deletions gpt_migrate/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,7 @@ def __init__(self, sourcedir, targetdir, sourcelang, targetlang, sourceentry, so

@app.command()
def main(
model: str = typer.Option("gpt-4-32k", help="Large Language Model to be used."),
model_provider: str = typer.Option("openai", help="Model provider to be used."),
modelrouter: str = typer.Option("openrouter", help="Model router to be used. Options are 'openrouter' or 'litellm'."),
model: str = typer.Option('openrouter/openai/gpt-4-32k', help="Large Language Model to be used. Default is 'openrouter/openai/gpt-4-32k'. To use OpenAI directly with your API key, use 'gpt-4-32k'."),
temperature: float = typer.Option(0, help="Temperature setting for the AI model."),
sourcedir: str = typer.Option("../benchmarks/flask-nodejs/source", help="Source directory containing the code to be migrated."),
sourcelang: str = typer.Option(None, help="Source language or framework of the code to be migrated."),
Expand All @@ -50,9 +48,7 @@ def main(

ai = AI(
model=model,
model_provider=model_provider,
temperature=temperature,
modelrouter=modelrouter
)

sourcedir = os.path.abspath(sourcedir)
Expand Down
7 changes: 5 additions & 2 deletions gpt_migrate/memory/external_dependencies
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
express
rocket
serde
serde_json
bcrypt
node-json-db
rusqlite
serde_json
1 change: 0 additions & 1 deletion gpt_migrate/memory/gpt_migrate/db.js_sigs.json

This file was deleted.

0 comments on commit 9f89422

Please sign in to comment.