From f67187399223d7d97cadf652eed6266035dd2f1f Mon Sep 17 00:00:00 2001
From: ks6088ts
Date: Wed, 8 May 2024 00:46:33 +0900
Subject: [PATCH] fix route

---
 Makefile                                      |  1 +
 azure_openai.env.sample                       |  2 +-
 .../routers/azure_ai_document_intelligence.py |  2 +-
 backend/routers/azure_ai_vision.py            |  4 +-
 backend/routers/azure_event_grid.py           |  2 +-
 backend/routers/azure_openai.py               |  4 +-
 backend/routers/azure_storage_blob.py         |  6 +--
 backend/routers/azure_storage_queue.py        | 10 ++---
 .../azure_ai_document_intelligence.py         |  2 +-
 backend/settings/azure_ai_vision.py           |  2 +-
 backend/settings/azure_openai.py              |  2 +-
 docs/README.md                                | 43 ++++++++++++++++---
 frontend/solutions/azure_ai_vision.py         |  2 +-
 frontend/solutions/document_intelligence.py   |  2 +-
 main.py                                       |  2 +-
 15 files changed, 60 insertions(+), 26 deletions(-)

diff --git a/Makefile b/Makefile
index 1e8aa09..73dca26 100644
--- a/Makefile
+++ b/Makefile
@@ -67,6 +67,7 @@ docker-build: ## build Docker image
 		--file $(DOCKER_FILE) \
 		--build-arg GIT_REVISION=$(GIT_REVISION) \
 		--build-arg GIT_TAG=$(GIT_TAG) \
+		--no-cache \
 		.
 
 .PHONY: docker-run
diff --git a/azure_openai.env.sample b/azure_openai.env.sample
index 2d32da2..4cf5c3f 100644
--- a/azure_openai.env.sample
+++ b/azure_openai.env.sample
@@ -1,4 +1,4 @@
-AZURE_OPENAI_ENDPOINT = "https://.openai.azure.com/"
+AZURE_OPENAI_ENDPOINT = "https://.openai.azure.com"
 AZURE_OPENAI_API_KEY = ""
 AZURE_OPENAI_API_VERSION = "2024-04-01-preview"
 AZURE_OPENAI_EMBEDDING_MODEL = "text-embedding-ada-002"
diff --git a/backend/routers/azure_ai_document_intelligence.py b/backend/routers/azure_ai_document_intelligence.py
index 0e807fa..979231c 100644
--- a/backend/routers/azure_ai_document_intelligence.py
+++ b/backend/routers/azure_ai_document_intelligence.py
@@ -20,7 +20,7 @@
 
 
 @router.post(
-    "/analyze_document/",
+    "/analyze_document",
     response_model=azure_ai_document_intelligence_schemas.AnalyzeDocumentResponse,
     status_code=200,
 )
diff --git a/backend/routers/azure_ai_vision.py b/backend/routers/azure_ai_vision.py
index e0aefa9..8042088 100644
--- a/backend/routers/azure_ai_vision.py
+++ b/backend/routers/azure_ai_vision.py
@@ -20,7 +20,7 @@
 
 
 @router.post(
-    "/image/analyze/",
+    "/image/analyze",
     response_model=azure_ai_vision_schemas.ImageAnalysisResponse,
     status_code=200,
 )
@@ -39,7 +39,7 @@ async def analyze_image(file: UploadFile):
 
 
 @router.post(
-    "/image/vectorize/",
+    "/image/vectorize",
     status_code=200,
 )
 async def vectorize_image(file: UploadFile):
diff --git a/backend/routers/azure_event_grid.py b/backend/routers/azure_event_grid.py
index daf9847..a3416af 100644
--- a/backend/routers/azure_event_grid.py
+++ b/backend/routers/azure_event_grid.py
@@ -19,7 +19,7 @@
 
 
 @router.post(
-    "/event_grid_event/",
+    "/event_grid_event",
     status_code=200,
 )
 async def send_event_grid_event(
diff --git a/backend/routers/azure_openai.py b/backend/routers/azure_openai.py
index 7cbd0ac..1dd6af4 100644
--- a/backend/routers/azure_openai.py
+++ b/backend/routers/azure_openai.py
@@ -20,7 +20,7 @@
 
 
 @router.post(
-    "/chat_completions/",
+    "/chat_completions",
     response_model=azure_openai_schemas.ChatCompletionResponse,
     status_code=200,
 )
@@ -35,7 +35,7 @@ async def create_chat_completions(body: azure_openai_schemas.ChatCompletionReque
 
 
 @router.post(
-    "/chat_completions_with_vision/",
+    "/chat_completions_with_vision",
     response_model=azure_openai_schemas.ChatCompletionWithVisionResponse,
     status_code=200,
 )
diff --git a/backend/routers/azure_storage_blob.py b/backend/routers/azure_storage_blob.py
index cfa6ad4..d16bc4b 100644
--- a/backend/routers/azure_storage_blob.py
+++ b/backend/routers/azure_storage_blob.py
@@ -21,7 +21,7 @@
 
 
 @router.post(
-    "/blobs/upload/",
+    "/blobs/upload",
     response_model=azure_storage_schemas.BlobUploadResponse,
     status_code=200,
 )
@@ -44,7 +44,7 @@ async def upload_blob(
 
 
 @router.delete(
-    "/blobs/delete/",
+    "/blobs/delete",
     status_code=200,
 )
 async def delete_blob(
@@ -64,7 +64,7 @@ async def delete_blob(
 
 
 @router.get(
-    "/blobs/",
+    "/blobs",
     status_code=200,
 )
 async def list_blobs():
diff --git a/backend/routers/azure_storage_queue.py b/backend/routers/azure_storage_queue.py
index 80f3015..7539f9e 100644
--- a/backend/routers/azure_storage_queue.py
+++ b/backend/routers/azure_storage_queue.py
@@ -21,7 +21,7 @@
 
 
 @router.post(
-    "/queues/",
+    "/queues",
     response_model=azure_storage_queue_schemas.CreateQueueResponse,
     status_code=200,
 )
@@ -41,7 +41,7 @@ async def create_queue(
 
 
 @router.delete(
-    "/queues/",
+    "/queues",
     response_model=azure_storage_queue_schemas.DeleteQueueResponse,
     status_code=200,
 )
@@ -61,7 +61,7 @@ async def delete_queue(
 
 
 @router.post(
-    "/messages/",
+    "/messages",
     response_model=azure_storage_queue_schemas.SendMessageResponse,
     status_code=200,
 )
@@ -81,7 +81,7 @@ async def send_message(
 
 
 @router.get(
-    "/messages/",
+    "/messages",
     status_code=200,
 )
 async def receive_messages(
@@ -112,7 +112,7 @@ async def receive_messages(
 
 
 @router.delete(
-    "/messages/",
+    "/messages",
     response_model=azure_storage_queue_schemas.DeleteMessageResponse,
     status_code=200,
 )
diff --git a/backend/settings/azure_ai_document_intelligence.py b/backend/settings/azure_ai_document_intelligence.py
index 3c4cc79..7c52902 100644
--- a/backend/settings/azure_ai_document_intelligence.py
+++ b/backend/settings/azure_ai_document_intelligence.py
@@ -3,7 +3,7 @@
 
 class Settings(BaseSettings):
     azure_ai_document_intelligence_endpoint: str = (
-        "https://.cognitiveservices.azure.com/"
+        "https://.cognitiveservices.azure.com"
     )
     azure_ai_document_intelligence_api_key: str = ""
 
diff --git a/backend/settings/azure_ai_vision.py b/backend/settings/azure_ai_vision.py
index 927980b..23e68bf 100644
--- a/backend/settings/azure_ai_vision.py
+++ b/backend/settings/azure_ai_vision.py
@@ -2,7 +2,7 @@
 
 
 class Settings(BaseSettings):
-    azure_ai_vision_endpoint: str = "https://.cognitiveservices.azure.com/"
+    azure_ai_vision_endpoint: str = "https://.cognitiveservices.azure.com"
     azure_ai_vision_api_key: str = ""
 
     model_config = SettingsConfigDict(
diff --git a/backend/settings/azure_openai.py b/backend/settings/azure_openai.py
index 319eeb6..cf8d10a 100644
--- a/backend/settings/azure_openai.py
+++ b/backend/settings/azure_openai.py
@@ -2,7 +2,7 @@
 
 
 class Settings(BaseSettings):
-    azure_openai_endpoint: str = "https://.openai.azure.com/"
+    azure_openai_endpoint: str = "https://.openai.azure.com"
     azure_openai_api_key: str = ""
     azure_openai_api_version: str = "2024-02-01"
     azure_openai_embedding_model: str = "text-embedding-ada-002"
diff --git a/docs/README.md b/docs/README.md
index 7e409e3..a66bbb0 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -3,11 +3,44 @@
 ## Docker
 
 ```shell
-# Build the Docker image
-make docker-build
-
-# Dry run the Docker container with default settings
-make --dry-run docker-run DOCKER_COMMAND="python main.py backend --port 8888 --debug"
+# Build the Docker image (optional)
+make docker-build DOCKER_IMAGE_COMPONENT=backend GIT_TAG=latest
+make docker-build DOCKER_IMAGE_COMPONENT=frontend GIT_TAG=latest
+
+# Create environment files for each service
+cp {NAME}.env.sample {NAME}.env
+
+# Run the Docker container for the backend
+docker run --rm \
+  --publish 8888:8888 \
+  --volume ${PWD}/azure_ai_document_intelligence.env:/app/azure_ai_document_intelligence.env \
+  --volume ${PWD}/azure_ai_vision.env:/app/azure_ai_vision.env \
+  --volume ${PWD}/azure_event_grid.env:/app/azure_event_grid.env \
+  --volume ${PWD}/azure_openai.env:/app/azure_openai.env \
+  --volume ${PWD}/azure_storage_blob.env:/app/azure_storage_blob.env \
+  --volume ${PWD}/azure_storage_queue.env:/app/azure_storage_queue.env \
+  ks6088ts/azure-ai-services-solutions:backend-latest \
+  python main.py backend \
+  --port 8888 \
+  --debug
+
+# Access the backend: http://localhost:8888
+
+# Run ngrok to expose the backend (for testing purposes only)
+ngrok http 8888
+NGROK_URL=""
+
+# Run the Docker container for the frontend
+docker run --rm \
+  --publish 8501:8501 \
+  --volume ${PWD}/azure_ai_speech.env:/app/azure_ai_speech.env \
+  ks6088ts/azure-ai-services-solutions:frontend-latest \
+  streamlit run main.py --server.port=8501 --server.address=0.0.0.0 -- frontend \
+  --solution-name sandbox \
+  --backend-url ${NGROK_URL} \
+  --debug
+
+# Access the frontend: http://localhost:8501
 ```
 
 # References
diff --git a/frontend/solutions/azure_ai_vision.py b/frontend/solutions/azure_ai_vision.py
index 68f0540..fd0383a 100644
--- a/frontend/solutions/azure_ai_vision.py
+++ b/frontend/solutions/azure_ai_vision.py
@@ -36,7 +36,7 @@ def start(
         bytes_data = file_uploader.getvalue()
         response = asyncio.run(
             http_post_file(
-                url=urljoin(base=backend_url, url="/azure_ai_vision/image/analyze/"),
+                url=urljoin(base=backend_url, url="/azure_ai_vision/image/analyze"),
                 data_bytes_io=BytesIO(bytes_data),
             )
         )
diff --git a/frontend/solutions/document_intelligence.py b/frontend/solutions/document_intelligence.py
index c149fd2..63a1ae2 100644
--- a/frontend/solutions/document_intelligence.py
+++ b/frontend/solutions/document_intelligence.py
@@ -36,7 +36,7 @@ def start(
         bytes_data = file_uploader.getvalue()
         response = asyncio.run(
             http_post_file(
-                url=urljoin(base=backend_url, url="/azure_ai_document_intelligence/analyze_document/"),
+                url=urljoin(base=backend_url, url="/azure_ai_document_intelligence/analyze_document"),
                 data_bytes_io=BytesIO(bytes_data),
             )
         )
diff --git a/main.py b/main.py
index b69bfd0..95804a2 100644
--- a/main.py
+++ b/main.py
@@ -42,7 +42,7 @@ def backend(
 @app.command()
 def frontend(
     solution_name: Annotated[str, typer.Option(help="Solution name")] = "SANDBOX",
-    backend_url: Annotated[str, typer.Option(help="Backend URL")] = "http://localhost:8000/",
+    backend_url: Annotated[str, typer.Option(help="Backend URL")] = "http://localhost:8000",
     debug: Annotated[bool, typer.Option(help="Enable debug mode")] = False,
 ):
     from frontend.entrypoint import start
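
Note: a likely motivation for dropping the trailing slashes is Starlette's slash-redirect
behaviour (redirect_slashes=True by default in FastAPI): a request whose path omits a
declared trailing slash is answered with a 307 redirect instead of being dispatched
directly to the handler, so standardizing backend routes and frontend URLs on slash-less
paths avoids the extra round trip. The minimal sketch below illustrates this with a
hypothetical /items/ route that is not taken from this repository; it assumes a recent
FastAPI/Starlette where TestClient wraps httpx and accepts follow_redirects.

    # Minimal sketch: a route declared WITH a trailing slash answers slash-less
    # requests with a 307 redirect rather than hitting the handler directly.
    from fastapi import FastAPI
    from fastapi.testclient import TestClient

    app = FastAPI()

    @app.post("/items/")  # hypothetical route, declared with a trailing slash
    async def create_item():
        return {"ok": True}

    client = TestClient(app)
    print(client.post("/items", follow_redirects=False).status_code)   # 307
    print(client.post("/items/", follow_redirects=False).status_code)  # 200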