From d560b9a4139d7c09fbf9959fcdc996a6e5f7eab3 Mon Sep 17 00:00:00 2001 From: Chinedum Echeta <60179183+cecheta@users.noreply.github.com> Date: Fri, 5 Apr 2024 09:18:11 +0100 Subject: [PATCH] Functional tests for /azure_byod (#589) * Functional tests for /azure_byod * Split tests into two * Refactor + README --- .../functional/backend_api/app_config.py | 19 ++- code/tests/functional/backend_api/common.py | 42 ++++++ code/tests/functional/backend_api/conftest.py | 79 ---------- .../functional/backend_api/tests/README.md | 56 +++++++ .../backend_api/tests/with_data/__init__.py | 0 .../backend_api/tests/with_data/conftest.py | 40 +++++ .../tests/with_data/test_azure_byod.py | 139 ++++++++++++++++++ .../{ => tests/with_data}/test_config.py | 0 .../with_data}/test_conversation_custom.py | 0 .../tests/without_data/__init__.py | 0 .../tests/without_data/conftest.py | 39 +++++ .../test_azure_byod_without_data.py | 116 +++++++++++++++ 12 files changed, 445 insertions(+), 85 deletions(-) create mode 100644 code/tests/functional/backend_api/common.py create mode 100644 code/tests/functional/backend_api/tests/README.md create mode 100644 code/tests/functional/backend_api/tests/with_data/__init__.py create mode 100644 code/tests/functional/backend_api/tests/with_data/conftest.py create mode 100644 code/tests/functional/backend_api/tests/with_data/test_azure_byod.py rename code/tests/functional/backend_api/{ => tests/with_data}/test_config.py (100%) rename code/tests/functional/backend_api/{ => tests/with_data}/test_conversation_custom.py (100%) create mode 100644 code/tests/functional/backend_api/tests/without_data/__init__.py create mode 100644 code/tests/functional/backend_api/tests/without_data/conftest.py create mode 100644 code/tests/functional/backend_api/tests/without_data/test_azure_byod_without_data.py diff --git a/code/tests/functional/backend_api/app_config.py b/code/tests/functional/backend_api/app_config.py index 828e15a8b..6e29f4f61 100644 --- a/code/tests/functional/backend_api/app_config.py +++ b/code/tests/functional/backend_api/app_config.py @@ -1,33 +1,40 @@ import logging import os -from typing import Any, Dict class AppConfig: - config: Dict[str, Any] = { + config: dict[str, str | None] = { "AZURE_SPEECH_SERVICE_KEY": "some-azure-speech-service-key", "AZURE_SPEECH_SERVICE_REGION": "some-azure-speech-service-region", "APPINSIGHTS_ENABLED": "False", "AZURE_OPENAI_API_KEY": "some-azure-openai-api-key", + "AZURE_OPENAI_API_VERSION": "2024-02-01", + "AZURE_SEARCH_INDEX": "some-azure-search-index", "AZURE_SEARCH_KEY": "some-azure-search-key", "AZURE_CONTENT_SAFETY_KEY": "some-content_safety-key", "AZURE_OPENAI_EMBEDDING_MODEL": "some-embedding-model", "AZURE_OPENAI_MODEL": "some-openai-model", "AZURE_SEARCH_CONVERSATIONS_LOG_INDEX": "some-log-index", + "AZURE_OPENAI_STREAM": "True", "LOAD_CONFIG_FROM_BLOB_STORAGE": "False", "TIKTOKEN_CACHE_DIR": f"{os.path.dirname(os.path.realpath(__file__))}/resources", + # These values are set directly within EnvHelper, adding them here ensures + # that they are removed from the environment when remove_from_environment() runs + "OPENAI_API_TYPE": None, + "OPENAI_API_KEY": None, + "OPENAI_API_VERSION": None, } - def __init__(self, config_overrides: Dict[str, Any] = {}) -> None: + def __init__(self, config_overrides: dict[str, str | None] = {}) -> None: self.config = self.config | config_overrides - def set(self, key: str, value: Any) -> None: + def set(self, key: str, value: str | None) -> None: self.config[key] = value - def get(self, key: str) -> Any: 
+ def get(self, key: str) -> str | None: return self.config[key] - def get_all(self) -> Dict[str, Any]: + def get_all(self) -> dict[str, str | None]: return self.config def apply_to_environment(self) -> None: diff --git a/code/tests/functional/backend_api/common.py b/code/tests/functional/backend_api/common.py new file mode 100644 index 000000000..2964641b8 --- /dev/null +++ b/code/tests/functional/backend_api/common.py @@ -0,0 +1,42 @@ +import logging +import socket +import threading +import time +import requests +from threading import Thread +from create_app import create_app + + +def start_app(app_port: int) -> Thread: + logging.info(f"Starting application on port {app_port}") + app = create_app() + app_process = threading.Thread(target=lambda: app.run(port=app_port), daemon=True) + app_process.start() + wait_for_app(app_port) + logging.info("Application started") + return app_process + + +def wait_for_app(port: int, initial_check_delay: int = 2): + attempts = 0 + time.sleep(initial_check_delay) + while attempts < 10: + try: + response = requests.get(f"http://localhost:{port}/api/config") + if response.status_code == 200: + return + except Exception: + pass + + attempts += 1 + time.sleep(1) + + raise Exception("App failed to start") + + +def get_free_port() -> int: + s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) + s.bind(("localhost", 0)) + _, port = s.getsockname() + s.close() + return port diff --git a/code/tests/functional/backend_api/conftest.py b/code/tests/functional/backend_api/conftest.py index a20bf9522..b4e5937bb 100644 --- a/code/tests/functional/backend_api/conftest.py +++ b/code/tests/functional/backend_api/conftest.py @@ -1,15 +1,8 @@ -import logging -import socket import ssl -import threading -import time import pytest from pytest_httpserver import HTTPServer -import requests from tests.functional.backend_api.app_config import AppConfig -from threading import Thread import trustme -from create_app import create_app @pytest.fixture(scope="session") @@ -43,42 +36,6 @@ def httpclient_ssl_context(ca): return ssl.create_default_context(cafile=ca_temp_path) -@pytest.fixture(scope="session") -def app_port() -> int: - logging.info("Getting free port") - return get_free_port() - - -@pytest.fixture(scope="session") -def app_url(app_port: int) -> str: - return f"http://localhost:{app_port}" - - -@pytest.fixture(scope="session") -def app_config(make_httpserver, ca): - logging.info("Creating APP CONFIG") - with ca.cert_pem.tempfile() as ca_temp_path: - app_config = AppConfig( - { - "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}", - "AZURE_SEARCH_SERVICE": f"https://localhost:{make_httpserver.port}", - "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}", - "SSL_CERT_FILE": ca_temp_path, - "CURL_CA_BUNDLE": ca_temp_path, - } - ) - logging.info(f"Created app config: {app_config.get_all()}") - yield app_config - - -@pytest.fixture(scope="session", autouse=True) -def manage_app(app_port: int, app_config: AppConfig): - app_config.apply_to_environment() - start_app(app_port) - yield - app_config.remove_from_environment() - - @pytest.fixture(scope="function", autouse=True) def setup_default_mocking(httpserver: HTTPServer, app_config: AppConfig): httpserver.expect_request( @@ -154,39 +111,3 @@ def setup_default_mocking(httpserver: HTTPServer, app_config: AppConfig): yield httpserver.check() - - -def start_app(app_port: int) -> Thread: - logging.info(f"Starting application on port {app_port}") - app = create_app() - app_process = 
threading.Thread(target=lambda: app.run(port=app_port)) - app_process.daemon = True - app_process.start() - wait_for_app(app_port) - logging.info("Application started") - return app_process - - -def wait_for_app(port: int, initial_check_delay: int = 10): - attempts = 0 - time.sleep(initial_check_delay) - while attempts < 10: - try: - response = requests.get(f"http://localhost:{port}/api/config") - if response.status_code == 200: - return - except Exception: - pass - - attempts += 1 - time.sleep(1) - - raise Exception("App failed to start") - - -def get_free_port() -> int: - s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) - s.bind(("localhost", 0)) - _, port = s.getsockname() - s.close() - return port diff --git a/code/tests/functional/backend_api/tests/README.md b/code/tests/functional/backend_api/tests/README.md new file mode 100644 index 000000000..f63a5c216 --- /dev/null +++ b/code/tests/functional/backend_api/tests/README.md @@ -0,0 +1,56 @@ +# Backend API Tests + +At present, there are two sets of tests: `with_data` and `without_data`. +Each set of tests starts its own instance of the backend API on a different port. +The difference between the two is the environment variables, namely the lack of the +`AZURE_SEARCH_SERVICE` variable for the `without_data` tests. + +When adding new tests, first check to see if it is possible to add the tests to an +existing set of tests, rather than creating a new set, as this removes the need for +starting up a new instance of the application on another port. + +New environment variables common to all tests can be directly added to the `config` +dict in [app_config.py](../app_config.py), while variables only needed for one set +of tests can be added to the `app_config` fixture in the respective `conftest.py` +file, e.g. [./with_data/conftest.py](./with_data/conftest.py). + +```py +@pytest.fixture(scope="package") +def app_config(make_httpserver, ca): + logging.info("Creating APP CONFIG") + with ca.cert_pem.tempfile() as ca_temp_path: + app_config = AppConfig( + { + "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "AZURE_SEARCH_SERVICE": f"https://localhost:{make_httpserver.port}/", + "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "SSL_CERT_FILE": ca_temp_path, + "CURL_CA_BUNDLE": ca_temp_path, + "NEW_ENV_VAR": "VALUE", + } + ) + logging.info(f"Created app config: {app_config.get_all()}") + yield app_config +``` + +To remove an environment variable from the default defined in the `AppConfig` class, +set its value to `None`. 
+ +```py +@pytest.fixture(scope="package") +def app_config(make_httpserver, ca): + logging.info("Creating APP CONFIG") + with ca.cert_pem.tempfile() as ca_temp_path: + app_config = AppConfig( + { + "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "AZURE_SEARCH_SERVICE": f"https://localhost:{make_httpserver.port}/", + "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "SSL_CERT_FILE": ca_temp_path, + "CURL_CA_BUNDLE": ca_temp_path, + "ENV_VAR_TO_REMOVE": None, + } + ) + logging.info(f"Created app config: {app_config.get_all()}") + yield app_config +``` diff --git a/code/tests/functional/backend_api/tests/with_data/__init__.py b/code/tests/functional/backend_api/tests/with_data/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/code/tests/functional/backend_api/tests/with_data/conftest.py b/code/tests/functional/backend_api/tests/with_data/conftest.py new file mode 100644 index 000000000..a8a6d5d99 --- /dev/null +++ b/code/tests/functional/backend_api/tests/with_data/conftest.py @@ -0,0 +1,40 @@ +import logging +import pytest +from tests.functional.backend_api.app_config import AppConfig +from tests.functional.backend_api.common import get_free_port, start_app + + +@pytest.fixture(scope="package") +def app_port() -> int: + logging.info("Getting free port") + return get_free_port() + + +@pytest.fixture(scope="package") +def app_url(app_port: int) -> str: + return f"http://localhost:{app_port}" + + +@pytest.fixture(scope="package") +def app_config(make_httpserver, ca): + logging.info("Creating APP CONFIG") + with ca.cert_pem.tempfile() as ca_temp_path: + app_config = AppConfig( + { + "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "AZURE_SEARCH_SERVICE": f"https://localhost:{make_httpserver.port}/", + "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "SSL_CERT_FILE": ca_temp_path, + "CURL_CA_BUNDLE": ca_temp_path, + } + ) + logging.info(f"Created app config: {app_config.get_all()}") + yield app_config + + +@pytest.fixture(scope="package", autouse=True) +def manage_app(app_port: int, app_config: AppConfig): + app_config.apply_to_environment() + start_app(app_port) + yield + app_config.remove_from_environment() diff --git a/code/tests/functional/backend_api/tests/with_data/test_azure_byod.py b/code/tests/functional/backend_api/tests/with_data/test_azure_byod.py new file mode 100644 index 000000000..ea0bd80bb --- /dev/null +++ b/code/tests/functional/backend_api/tests/with_data/test_azure_byod.py @@ -0,0 +1,139 @@ +import json +import pytest +from pytest_httpserver import HTTPServer +import requests +from string import Template + +from tests.functional.backend_api.request_matching import ( + RequestMatcher, + verify_request_made, +) +from tests.functional.backend_api.app_config import AppConfig + +pytestmark = pytest.mark.functional + +path = "/api/conversation/azure_byod" +body = { + "conversation_id": "123", + "messages": [ + {"role": "user", "content": "Hello"}, + {"role": "assistant", "content": "Hi, how can I help?"}, + {"role": "user", "content": "What is the meaning of life?"}, + ], +} + + +@pytest.fixture(scope="function", autouse=True) +def setup_default_mocking(httpserver: HTTPServer, app_config: AppConfig): + httpserver.expect_request( + f"/openai/deployments/{app_config.get('AZURE_OPENAI_MODEL')}/chat/completions", + method="POST", + ).respond_with_data( + Template( + r"""data: 
{"id":"92f715be-cfc4-4ae6-80f8-c86b7955f6af","model":"$model","created":1712077271,"object":"extensions.chat.completion.chunk","choices":[{"index":0,"delta":{"role":"assistant","context":{"citations":[{"content":"document","title":"/documents/doc.pdf","url":null,"filepath":null,"chunk_id":"0"}],"intent":"[\"intent\"]"}},"end_turn":false,"finish_reason":null}]}
+
+data: {"id":"92f715be-cfc4-4ae6-80f8-c86b7955f6af","model":"$model","created":1712077271,"object":"extensions.chat.completion.chunk","choices":[{"index":0,"delta":{"content":"42 is the meaning of life"},"end_turn":false,"finish_reason":null}],"system_fingerprint":"fp_68a7d165bf"}
+
+data: {"id":"92f715be-cfc4-4ae6-80f8-c86b7955f6af","model":"$model","created":1712077271,"object":"extensions.chat.completion.chunk","choices":[{"index":0,"delta":{},"end_turn":true,"finish_reason":"stop"}]}
+
+data: [DONE]
+"""
+        ).substitute(model=app_config.get("AZURE_OPENAI_MODEL"))
+    )
+
+    yield
+
+    httpserver.check()
+
+
+def test_azure_byod_responds_successfully_when_streaming(
+    app_url: str, app_config: AppConfig, httpserver: HTTPServer
+):
+    # when
+    response = requests.post(f"{app_url}{path}", json=body)
+
+    # then
+    assert response.status_code == 200
+    assert response.headers["Content-Type"] == "application/json-lines"
+
+    response_lines = response.text.splitlines()
+    assert len(response_lines) == 3
+
+    final_response_json = json.loads(response_lines[-1])
+    assert final_response_json == {
+        "id": "92f715be-cfc4-4ae6-80f8-c86b7955f6af",
+        "model": app_config.get("AZURE_OPENAI_MODEL"),
+        "created": 1712077271,
+        "object": "extensions.chat.completion.chunk",
+        "choices": [
+            {
+                "messages": [
+                    {
+                        "content": r'{"citations": [{"content": "document", "title": "/documents/doc.pdf", "url": null, "filepath": null, "chunk_id": "0"}], "intent": "[\"intent\"]"}',
+                        "end_turn": False,
+                        "role": "tool",
+                    },
+                    {
+                        "content": "42 is the meaning of life",
+                        "end_turn": True,
+                        "role": "assistant",
+                    },
+                ]
+            }
+        ],
+    }
+
+
+def test_post_makes_correct_call_to_azure_openai(
+    app_url: str, app_config: AppConfig, httpserver: HTTPServer
+):
+    # when
+    requests.post(f"{app_url}{path}", json=body)
+
+    # then
+    verify_request_made(
+        mock_httpserver=httpserver,
+        request_matcher=RequestMatcher(
+            path=f"/openai/deployments/{app_config.get('AZURE_OPENAI_MODEL')}/chat/completions",
+            method="POST",
+            json={
+                "messages": body["messages"],
+                "temperature": 0.0,
+                "max_tokens": 1000,
+                "top_p": 1.0,
+                "stop": None,
+                "stream": True,
+                "data_sources": [
+                    {
+                        "type": "azure_search",
+                        "parameters": {
+                            "endpoint": app_config.get("AZURE_SEARCH_SERVICE"),
+                            "index_name": app_config.get("AZURE_SEARCH_INDEX"),
+                            "fields_mapping": {
+                                "content_fields": ["content"],
+                                "title_field": "title",
+                                "url_field": "url",
+                                "filepath_field": "filepath",
+                            },
+                            "in_scope": True,
+                            "top_n_documents": 5,
+                            "query_type": "simple",
+                            "semantic_configuration": "",
+                            "role_information": "You are an AI assistant that helps people find information.",
+                            "authentication": {
+                                "type": "api_key",
+                                "key": app_config.get("AZURE_SEARCH_KEY"),
+                            },
+                        },
+                    }
+                ],
+            },
+            headers={
+                "Content-Type": "application/json",
+                "x-ms-useragent": "GitHubSampleWebApp/PublicAPI/1.0.0",
+                "api-key": app_config.get("AZURE_OPENAI_API_KEY"),
+            },
+            query_string="api-version=2024-02-01",
+            times=1,
+        ),
+    )
diff --git a/code/tests/functional/backend_api/test_config.py b/code/tests/functional/backend_api/tests/with_data/test_config.py
similarity index 100%
rename from 
code/tests/functional/backend_api/test_config.py rename to code/tests/functional/backend_api/tests/with_data/test_config.py diff --git a/code/tests/functional/backend_api/test_conversation_custom.py b/code/tests/functional/backend_api/tests/with_data/test_conversation_custom.py similarity index 100% rename from code/tests/functional/backend_api/test_conversation_custom.py rename to code/tests/functional/backend_api/tests/with_data/test_conversation_custom.py diff --git a/code/tests/functional/backend_api/tests/without_data/__init__.py b/code/tests/functional/backend_api/tests/without_data/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/code/tests/functional/backend_api/tests/without_data/conftest.py b/code/tests/functional/backend_api/tests/without_data/conftest.py new file mode 100644 index 000000000..0b73da0d6 --- /dev/null +++ b/code/tests/functional/backend_api/tests/without_data/conftest.py @@ -0,0 +1,39 @@ +import logging +import pytest +from tests.functional.backend_api.app_config import AppConfig +from tests.functional.backend_api.common import get_free_port, start_app + + +@pytest.fixture(scope="package") +def app_port() -> int: + logging.info("Getting free port") + return get_free_port() + + +@pytest.fixture(scope="package") +def app_url(app_port: int) -> str: + return f"http://localhost:{app_port}" + + +@pytest.fixture(scope="package") +def app_config(make_httpserver, ca): + logging.info("Creating APP CONFIG") + with ca.cert_pem.tempfile() as ca_temp_path: + app_config = AppConfig( + { + "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}/", + "SSL_CERT_FILE": ca_temp_path, + "CURL_CA_BUNDLE": ca_temp_path, + } + ) + logging.info(f"Created app config: {app_config.get_all()}") + yield app_config + + +@pytest.fixture(scope="package", autouse=True) +def manage_app(app_port: int, app_config: AppConfig): + app_config.apply_to_environment() + start_app(app_port) + yield + app_config.remove_from_environment() diff --git a/code/tests/functional/backend_api/tests/without_data/test_azure_byod_without_data.py b/code/tests/functional/backend_api/tests/without_data/test_azure_byod_without_data.py new file mode 100644 index 000000000..92cb32772 --- /dev/null +++ b/code/tests/functional/backend_api/tests/without_data/test_azure_byod_without_data.py @@ -0,0 +1,116 @@ +import json +import pytest +from pytest_httpserver import HTTPServer +import requests +from string import Template + +from tests.functional.backend_api.request_matching import ( + RequestMatcher, + verify_request_made, +) +from tests.functional.backend_api.app_config import AppConfig + +pytestmark = pytest.mark.functional + +path = "/api/conversation/azure_byod" +body = { + "conversation_id": "123", + "messages": [ + {"role": "user", "content": "Hello"}, + {"role": "assistant", "content": "Hi, how can I help?"}, + {"role": "user", "content": "What is the meaning of life?"}, + ], +} + + +@pytest.fixture(scope="function", autouse=True) +def setup_default_mocking(httpserver: HTTPServer, app_config: AppConfig): + httpserver.expect_request( + f"/openai/deployments/{app_config.get('AZURE_OPENAI_MODEL')}/chat/completions", + method="POST", + ).respond_with_data( + Template( + """data: 
{"id":"","object":"","created":0,"model":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}],"choices":[]} + +data: {"id":"chatcmpl-99tA6ZsoSvjQ0tGV3nGBCdBuEg3KJ","object":"chat.completion.chunk","created":1712144022,"model":"$model","choices":[{"finish_reason":null,"index":0,"delta":{"role":"assistant","content":""},"content_filter_results":{},"logprobs":null}],"system_fingerprint":"fp_68a7d165bf"} + +data: {"id":"chatcmpl-99tA6ZsoSvjQ0tGV3nGBCdBuEg3KJ","object":"chat.completion.chunk","created":1712144022,"model":"$model","choices":[{"finish_reason":null,"index":0,"delta":{"content":"42 is the meaning of life"},"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"logprobs":null}],"system_fingerprint":"fp_68a7d165bf"} + +data: {"id":"chatcmpl-99tA6ZsoSvjQ0tGV3nGBCdBuEg3KJ","object":"chat.completion.chunk","created":1712144022,"model":"$model","choices":[{"finish_reason":"stop","index":0,"delta":{"content":null},"content_filter_results":{},"logprobs":null}],"system_fingerprint":"fp_68a7d165bf"} + +data: [DONE] +""" + ).substitute(model=app_config.get("AZURE_OPENAI_MODEL")), + ) + + yield + + httpserver.check() + + +def test_azure_byod_responds_successfully_when_streaming( + app_url: str, app_config: AppConfig, httpserver: HTTPServer +): + # when + response = requests.post(f"{app_url}{path}", json=body) + + # then + assert response.status_code == 200 + assert response.headers["Content-Type"] == "application/json-lines" + + response_lines = response.text.splitlines() + assert len(response_lines) == 2 + + final_response_json = json.loads(response_lines[-1]) + assert final_response_json == { + "id": "chatcmpl-99tA6ZsoSvjQ0tGV3nGBCdBuEg3KJ", + "model": app_config.get("AZURE_OPENAI_MODEL"), + "created": 1712144022, + "object": "chat.completion.chunk", + "choices": [ + { + "messages": [ + { + "content": "42 is the meaning of life", + "role": "assistant", + }, + ] + } + ], + } + + +def test_post_makes_correct_call_to_azure_openai( + app_url: str, app_config: AppConfig, httpserver: HTTPServer +): + # when + requests.post(f"{app_url}{path}", json=body) + + verify_request_made( + mock_httpserver=httpserver, + request_matcher=RequestMatcher( + path=f"/openai/deployments/{app_config.get('AZURE_OPENAI_MODEL')}/chat/completions", + method="POST", + json={ + "messages": [ + { + "role": "system", + "content": "You are an AI assistant that helps people find information.", + }, + ] + + body["messages"], + "model": app_config.get("AZURE_OPENAI_MODEL"), + "temperature": 0.0, + "max_tokens": 1000, + "top_p": 1.0, + "stop": None, + "stream": True, + }, + headers={ + "Content-Type": "application/json", + "api-key": app_config.get("AZURE_OPENAI_API_KEY"), + }, + query_string="api-version=2024-02-01", + times=1, + ), + )
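The new README advises adding further tests to one of the existing suites where possible, since each suite shares a single application instance. As a purely illustrative sketch (not part of this patch), a new module placed under `tests/with_data/` or `tests/without_data/` would pick up that suite's package-scoped fixtures (`app_url`, `app_config`, `manage_app`) automatically; the module name and the status code asserted for an empty request body below are assumptions.

```py
# Illustrative sketch only: a candidate module such as tests/with_data/test_validation.py.
# Fixtures (app_url, app_config, manage_app) come from the suite's conftest.py,
# so no additional application instance needs to be started.
import pytest
import requests

pytestmark = pytest.mark.functional


def test_azure_byod_rejects_empty_body(app_url: str):
    # when: posting an empty JSON body to the BYOD conversation endpoint
    response = requests.post(f"{app_url}/api/conversation/azure_byod", json={})

    # then: assumed to fail; the exact status code depends on the backend's validation
    assert response.status_code >= 400
```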