Commit b90b98b

(fix) LiteLLM Proxy fix GET /files/{file_id:path}/content endpoint (#7342)

* fix order of get_file_content
* update e2e files tests
* add e2e batches endpoint testing
* update config.yml
* write content to file
* use correct oai_misc_config
* fixes for openai batches endpoint testing
* remove extra out file
* fix input.jsonl
1 parent c788937 commit b90b98b
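
The "fix order of get_file_content" item refers to route registration order: because the route uses a `{file_id:path}` parameter, the catch-all `GET /files/{file_id:path}` greedily matches requests ending in `/content` unless the more specific content route is registered first. A minimal FastAPI sketch of that pitfall (handlers and return values are illustrative, not the proxy's actual code):

```python
# Illustrative only: shows why registration order matters for a
# "{file_id:path}" parameter. Starlette matches routes in the order they are
# added, and the ":path" converter also matches "/", so the catch-all route
# would otherwise capture ".../content" as part of the file id.
from fastapi import FastAPI

app = FastAPI()


@app.get("/v1/files/{file_id:path}/content")  # specific route must come first
async def get_file_content(file_id: str):
    return {"file_id": file_id, "kind": "content"}


@app.get("/v1/files/{file_id:path}")  # catch-all route second
async def get_file(file_id: str):
    return {"file_id": file_id, "kind": "metadata"}
```

With the order reversed, `GET /v1/files/abc/content` resolves to the catch-all handler with `file_id="abc/content"` instead of returning the file's content.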

File tree

10 files changed: +351 -72 lines changed

.circleci/config.yml

Lines changed: 125 additions & 0 deletions
@@ -998,6 +998,124 @@ jobs:
             python -m pytest -s -vv tests/*.py -x --junitxml=test-results/junit.xml --durations=5 --ignore=tests/otel_tests --ignore=tests/pass_through_tests --ignore=tests/proxy_admin_ui_tests --ignore=tests/load_tests --ignore=tests/llm_translation --ignore=tests/image_gen_tests --ignore=tests/pass_through_unit_tests
           no_output_timeout: 120m
 
+      # Store test results
+      - store_test_results:
+          path: test-results
+  e2e_openai_misc_endpoints:
+    machine:
+      image: ubuntu-2204:2023.10.1
+    resource_class: xlarge
+    working_directory: ~/project
+    steps:
+      - checkout
+      - run:
+          name: Install Docker CLI (In case it's not already installed)
+          command: |
+            sudo apt-get update
+            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
+      - run:
+          name: Install Python 3.9
+          command: |
+            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
+            bash miniconda.sh -b -p $HOME/miniconda
+            export PATH="$HOME/miniconda/bin:$PATH"
+            conda init bash
+            source ~/.bashrc
+            conda create -n myenv python=3.9 -y
+            conda activate myenv
+            python --version
+      - run:
+          name: Install Dependencies
+          command: |
+            pip install "pytest==7.3.1"
+            pip install "pytest-asyncio==0.21.1"
+            pip install aiohttp
+            python -m pip install --upgrade pip
+            python -m pip install -r .circleci/requirements.txt
+            pip install "pytest==7.3.1"
+            pip install "pytest-retry==1.6.3"
+            pip install "pytest-mock==3.12.0"
+            pip install "pytest-asyncio==0.21.1"
+            pip install mypy
+            pip install "jsonlines==4.0.0"
+            pip install "google-generativeai==0.3.2"
+            pip install "google-cloud-aiplatform==1.43.0"
+            pip install pyarrow
+            pip install "boto3==1.34.34"
+            pip install "aioboto3==12.3.0"
+            pip install langchain
+            pip install "langfuse>=2.0.0"
+            pip install "logfire==0.29.0"
+            pip install numpydoc
+            pip install prisma
+            pip install fastapi
+            pip install jsonschema
+            pip install "httpx==0.24.1"
+            pip install "gunicorn==21.2.0"
+            pip install "anyio==3.7.1"
+            pip install "aiodynamo==23.10.1"
+            pip install "asyncio==3.4.3"
+            pip install "PyGithub==1.59.1"
+            pip install "openai==1.54.0"
+      # Run pytest and generate JUnit XML report
+      - run:
+          name: Build Docker image
+          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
+      - run:
+          name: Run Docker container
+          command: |
+            docker run -d \
+              -p 4000:4000 \
+              -e DATABASE_URL=$PROXY_DATABASE_URL \
+              -e AZURE_API_KEY=$AZURE_API_KEY \
+              -e REDIS_HOST=$REDIS_HOST \
+              -e REDIS_PASSWORD=$REDIS_PASSWORD \
+              -e REDIS_PORT=$REDIS_PORT \
+              -e AZURE_FRANCE_API_KEY=$AZURE_FRANCE_API_KEY \
+              -e AZURE_EUROPE_API_KEY=$AZURE_EUROPE_API_KEY \
+              -e MISTRAL_API_KEY=$MISTRAL_API_KEY \
+              -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
+              -e GROQ_API_KEY=$GROQ_API_KEY \
+              -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
+              -e COHERE_API_KEY=$COHERE_API_KEY \
+              -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
+              -e AWS_REGION_NAME=$AWS_REGION_NAME \
+              -e AUTO_INFER_REGION=True \
+              -e OPENAI_API_KEY=$OPENAI_API_KEY \
+              -e LITELLM_LICENSE=$LITELLM_LICENSE \
+              -e LANGFUSE_PROJECT1_PUBLIC=$LANGFUSE_PROJECT1_PUBLIC \
+              -e LANGFUSE_PROJECT2_PUBLIC=$LANGFUSE_PROJECT2_PUBLIC \
+              -e LANGFUSE_PROJECT1_SECRET=$LANGFUSE_PROJECT1_SECRET \
+              -e LANGFUSE_PROJECT2_SECRET=$LANGFUSE_PROJECT2_SECRET \
+              --name my-app \
+              -v $(pwd)/litellm/proxy/example_config_yaml/oai_misc_config.yaml:/app/config.yaml \
+              my-app:latest \
+              --config /app/config.yaml \
+              --port 4000 \
+              --detailed_debug \
+      - run:
+          name: Install curl and dockerize
+          command: |
+            sudo apt-get update
+            sudo apt-get install -y curl
+            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
+            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
+            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
+      - run:
+          name: Start outputting logs
+          command: docker logs -f my-app
+          background: true
+      - run:
+          name: Wait for app to be ready
+          command: dockerize -wait http://localhost:4000 -timeout 5m
+      - run:
+          name: Run tests
+          command: |
+            pwd
+            ls
+            python -m pytest -s -vv tests/openai_misc_endpoints_tests --junitxml=test-results/junit.xml --durations=5
+          no_output_timeout: 120m
+
       # Store test results
       - store_test_results:
           path: test-results
@@ -1572,6 +1690,12 @@ workflows:
               only:
                 - main
                 - /litellm_.*/
+      - e2e_openai_misc_endpoints:
+          filters:
+            branches:
+              only:
+                - main
+                - /litellm_.*/
       - proxy_logging_guardrails_model_info_tests:
          filters:
            branches:
@@ -1655,6 +1779,7 @@ workflows:
           requires:
             - local_testing
             - build_and_test
+            - e2e_openai_misc_endpoints
             - load_testing
             - test_bad_database_url
             - llm_translation_testing
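
The new e2e_openai_misc_endpoints job builds the database Docker image, starts the proxy with oai_misc_config.yaml mounted, waits for port 4000, and then runs tests/openai_misc_endpoints_tests against it. A rough sketch of the kind of round-trip check such a suite can make on the fixed endpoint (the test name, fixtures, and JSONL payload are assumptions, not the repository's actual test code):

```python
# Illustrative end-to-end check against a locally running proxy; only the
# upload -> GET /files/{file_id}/content round trip comes from the commit.
import json

import openai
import pytest

PROXY_BASE_URL = "http://localhost:4000"  # port published by the docker run step
MASTER_KEY = "sk-1234"  # master_key from the example config below


@pytest.mark.asyncio
async def test_file_content_round_trip(tmp_path):
    client = openai.AsyncOpenAI(base_url=PROXY_BASE_URL, api_key=MASTER_KEY)

    # Write a one-line batch input file to upload.
    request_line = {
        "custom_id": "request-1",
        "method": "POST",
        "url": "/v1/chat/completions",
        "body": {
            "model": "gpt-3.5-turbo",
            "messages": [{"role": "user", "content": "hello"}],
        },
    }
    input_path = tmp_path / "input.jsonl"
    input_path.write_text(json.dumps(request_line) + "\n")

    # Upload the file, then read it back through GET /files/{file_id}/content.
    uploaded = await client.files.create(file=input_path.open("rb"), purpose="batch")
    content = await client.files.content(file_id=uploaded.id)

    assert json.loads(content.text.strip())["custom_id"] == "request-1"
```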
litellm/proxy/example_config_yaml/oai_misc_config.yaml

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
+model_list:
+  - model_name: gpt-3.5-turbo-end-user-test
+    litellm_params:
+      model: gpt-3.5-turbo
+      region_name: "eu"
+    model_info:
+      id: "1"
+  - model_name: "*"
+    litellm_params:
+      model: openai/*
+      api_key: os.environ/OPENAI_API_KEY
+  # provider specific wildcard routing
+  - model_name: "anthropic/*"
+    litellm_params:
+      model: "anthropic/*"
+      api_key: os.environ/ANTHROPIC_API_KEY
+  - model_name: "groq/*"
+    litellm_params:
+      model: "groq/*"
+      api_key: os.environ/GROQ_API_KEY
+litellm_settings:
+  # set_verbose: True  # Uncomment this if you want to see verbose logs; not recommended in production
+  drop_params: True
+  # max_budget: 100
+  # budget_duration: 30d
+  num_retries: 5
+  request_timeout: 600
+  telemetry: False
+  context_window_fallbacks: [{"gpt-3.5-turbo": ["gpt-3.5-turbo-large"]}]
+  default_team_settings:
+    - team_id: team-1
+      success_callback: ["langfuse"]
+      failure_callback: ["langfuse"]
+      langfuse_public_key: os.environ/LANGFUSE_PROJECT1_PUBLIC # Project 1
+      langfuse_secret: os.environ/LANGFUSE_PROJECT1_SECRET # Project 1
+    - team_id: team-2
+      success_callback: ["langfuse"]
+      failure_callback: ["langfuse"]
+      langfuse_public_key: os.environ/LANGFUSE_PROJECT2_PUBLIC # Project 2
+      langfuse_secret: os.environ/LANGFUSE_PROJECT2_SECRET # Project 2
+      langfuse_host: https://us.cloud.langfuse.com
+
+# For /fine_tuning/jobs endpoints
+finetune_settings:
+  - custom_llm_provider: azure
+    api_base: https://exampleopenaiendpoint-production.up.railway.app
+    api_key: fake-key
+    api_version: "2023-03-15-preview"
+  - custom_llm_provider: openai
+    api_key: os.environ/OPENAI_API_KEY
+
+# for /files endpoints
+files_settings:
+  - custom_llm_provider: azure
+    api_base: https://exampleopenaiendpoint-production.up.railway.app
+    api_key: fake-key
+    api_version: "2023-03-15-preview"
+  - custom_llm_provider: openai
+    api_key: os.environ/OPENAI_API_KEY
+
+general_settings:
+  master_key: sk-1234 # [OPTIONAL] Use to enforce auth on proxy. See - https://docs.litellm.ai/docs/proxy/virtual_keys
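
With this config mounted into the container, files_settings and finetune_settings pick the provider credentials behind the OpenAI-compatible /files and /fine_tuning/jobs routes, and master_key authenticates callers. A hedged sketch of the batch flow the commit message says is now exercised end to end, driven with the OpenAI SDK pointed at the proxy (base URL and key come from the CI setup above; the file name, model, and polling loop are illustrative):

```python
# Sketch of the /files + /batches flow through the proxy; everything except
# the base URL and master key is an illustrative assumption.
import time

from openai import OpenAI

client = OpenAI(base_url="http://localhost:4000", api_key="sk-1234")

# 1. Upload the batch input file (provider routing comes from files_settings).
batch_input = client.files.create(file=open("input.jsonl", "rb"), purpose="batch")

# 2. Create a batch job over the uploaded file.
batch = client.batches.create(
    input_file_id=batch_input.id,
    endpoint="/v1/chat/completions",
    completion_window="24h",
)

# 3. Poll until the batch finishes, then pull results via
#    GET /files/{file_id}/content -- the endpoint this commit fixes.
while batch.status not in ("completed", "failed", "expired", "cancelled"):
    time.sleep(5)
    batch = client.batches.retrieve(batch.id)

if batch.output_file_id:
    output = client.files.content(file_id=batch.output_file_id)
    print(output.text)
```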
