From b3eb3822e6ce84623046bea183453dc8769f3023 Mon Sep 17 00:00:00 2001
From: Jes360
Date: Wed, 3 Sep 2025 17:05:21 +1000
Subject: [PATCH 1/5] Caching layer: Redis + /status, /clear, /stats + TTL + Hit/Miss

---
 .env.example                           |   6 +-
 app/api/middleware/cache_middleware.py |  60 +++++++++++++
 app/api/v1/cache.py                    |  37 ++++++++
 app/api/v1/graph.py                    |  34 +++++++
 app/core/config.py                     |  37 +++++++-
 app/main.py                            |  27 +++++-
 app/services/cache_service.py          |  66 ++++++++++++++
 app/services/graph_service.py          |  70 +++++++++++++++
 pyproject.toml                         |   4 +
 uv.lock                                | 120 ++++++++++++++++++++++++-
 10 files changed, 454 insertions(+), 7 deletions(-)
 create mode 100644 app/api/middleware/cache_middleware.py
 create mode 100644 app/api/v1/cache.py
 create mode 100644 app/api/v1/graph.py
 create mode 100644 app/services/cache_service.py
 create mode 100644 app/services/graph_service.py

diff --git a/.env.example b/.env.example
index c04c89c..8fa1e64 100644
--- a/.env.example
+++ b/.env.example
@@ -7,7 +7,7 @@ VERSION = "0.1.0"
 
 # Azure AD Configuration
 AZURE_CLIENT_ID = "your_client_id"
-AZURE_CLIENT_SECRET = "your_client_secret"
+AZURE_CLIENT_SECRET = "your_client_secrect"
 AZURE_TENANT_ID = "your_tenant_id"
 
 # Logging
@@ -19,4 +19,6 @@ ALLOWED_HOSTS = "["*"]"
 API_PREFIX = "/api/v1"
 
 # Microsoft Graph API
-GRAPH_API_BASE_URL = "https://graph.microsoft.com/v1.0"
\ No newline at end of file
+GRAPH_API_BASE_URL = "https://graph.microsoft.com/v1.0"
+
+
diff --git a/app/api/middleware/cache_middleware.py b/app/api/middleware/cache_middleware.py
new file mode 100644
index 0000000..8e2e3f1
--- /dev/null
+++ b/app/api/middleware/cache_middleware.py
@@ -0,0 +1,60 @@
from starlette.types import ASGIApp, Receive, Scope, Send
from fastapi import Request, Response
from app.services.cache_service import cache_service
from app.core.config import settings
import json

class CacheMiddleware:
    """Middleware to cache GET responses."""

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        # Only cache HTTP GET requests
        if scope["type"] == "http" and scope["method"] == "GET":
            request = Request(scope, receive)
            key = request.url.path + "?" + (request.url.query or "")
            if settings.CACHE_ENABLED:
                # Try to fetch from cache
                cached = await cache_service.get(key)
                if cached:
                    # Return cached response
                    headers = {"content-type": "application/json"}
                    response = Response(content=cached, status_code=200, headers=headers)
                    await response(scope, receive, send)
                    return

            # Capture the response
            responder = _ResponseCatcher(self.app, key)
            await responder(scope, receive, send)
            return

        # Non-GET or caching disabled: continue normally
        await self.app(scope, receive, send)


class _ResponseCatcher:
    """Helper to capture response body and cache it."""

    def __init__(self, app: ASGIApp, key: str):
        self.app = app
        self.key = key
        self.body = b""
        self.status_code = 200
        self.headers = {}

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        async def send_wrapper(message):
            if message["type"] == "http.response.start":
                self.status_code = message["status"]
                self.headers = dict(message.get("headers", []))
            elif message["type"] == "http.response.body":
                self.body += message.get("body", b"")
            await send(message)

        await self.app(scope, receive, send_wrapper)

        # Cache successful GET responses
        if self.status_code == 200:
            await cache_service.set(self.key, self.body.decode(), settings.CACHE_TTL_DEFAULT)
diff --git a/app/api/v1/cache.py b/app/api/v1/cache.py
new file mode 100644
index 0000000..da83a69
--- /dev/null
+++ b/app/api/v1/cache.py
@@ -0,0 +1,37 @@
from fastapi import APIRouter, HTTPException
from app.services.cache_service import cache_service
from app.core.config import settings

router = APIRouter()
PREFIX = settings.API_PREFIX + "/cache"

@router.get("/status")
async def cache_status():
    """
    Get cache status (enabled flag and Redis connection health).
    """
    enabled = settings.CACHE_ENABLED
    try:
        pong = await cache_service.redis.ping()
        healthy = pong is True
    except Exception as e:
        healthy = False
    return {"enabled": enabled, "healthy": healthy}

@router.post("/clear")
async def cache_clear():
    """
    Clear the entire cache.
    """
    if not settings.CACHE_ENABLED:
        raise HTTPException(status_code=400, detail="Caching is disabled")
    await cache_service.clear()
    return {"status": "cleared"}

@router.get("/stats")
async def cache_stats():
    """
    Retrieve cache hit/miss statistics.
+ """ + stats = cache_service.stats() + return stats diff --git a/app/api/v1/graph.py b/app/api/v1/graph.py new file mode 100644 index 0000000..cdf11a6 --- /dev/null +++ b/app/api/v1/graph.py @@ -0,0 +1,34 @@ +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel +from typing import Optional +from app.services.graph_service import GraphService + +router = APIRouter() +graph_service = GraphService() + +class UsersRequest(BaseModel): + token: str + top: Optional[int] = None + +class MeRequest(BaseModel): + token: str + +@router.post("/graph/users") +async def graph_users(req: UsersRequest): + try: + result = await graph_service.list_users(req.token, req.top) + except Exception as e: + raise HTTPException(status_code=502, detail=f"Unhandled exception: {e}") + if "error" in result: + raise HTTPException(status_code=502, detail=result["error"]) + return result + +@router.post("/graph/me") +async def graph_me(req: MeRequest): + try: + result = await graph_service.get_me(req.token) + except Exception as e: + raise HTTPException(status_code=502, detail=f"Unhandled exception: {e}") + if "error" in result: + raise HTTPException(status_code=502, detail=result["error"]) + return result diff --git a/app/core/config.py b/app/core/config.py index c8c0858..b3337e1 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -36,13 +36,46 @@ class Settings(BaseSettings): default="https://graph.microsoft.com/v1.0", description="Base URL for Microsoft Graph API", ) + # Redis Configuration + REDIS_URL: Optional[str] = Field( + default="redis://localhost:6379/0", + description="Redis connection URL", + ) + REDIS_HOST: Optional[str] = Field( + default="localhost", + description="Redis host", + ) + REDIS_PORT: Optional[int] = Field( + default=6379, + description="Redis port", + ) + REDIS_DB: Optional[int] = Field( + default=0, + description="Redis database index", + ) + REDIS_PASSWORD: Optional[str] = Field( + default=None, + description="Redis password", + ) + + # Cache Settings + CACHE_ENABLED: bool = Field( + default=True, + description="Enable or disable caching", + ) + CACHE_TTL_DEFAULT: int = Field( + default=300, + description="Default TTL (seconds) for cache entries", + ) + CACHE_KEY_PREFIX: str = Field( + default="autoaudit", + description="Prefix for all cache keys", + ) class Config: """Pydantic configuration.""" - env_file = ".env" env_file_encoding = "utf-8" case_sensitive = True - settings = Settings() diff --git a/app/main.py b/app/main.py index c7d9916..c78b352 100644 --- a/app/main.py +++ b/app/main.py @@ -7,6 +7,10 @@ from app.core.config import settings from app.api.v1 import auth from app.utils.logger import logger +from app.api.v1 import cache +from app.api.middleware.cache_middleware import CacheMiddleware +from app.api.v1.graph import router as graph_router + def create_app() -> FastAPI: @@ -35,7 +39,7 @@ def configure_middleware(app: FastAPI, settings): allow_methods=["*"], allow_headers=["*"], ) - + app.add_middleware(CacheMiddleware) # Trusted host middleware app.add_middleware( TrustedHostMiddleware, @@ -105,6 +109,27 @@ async def health_check(): "status": "healthy", "version": settings.VERSION, } + +def configure_routing(app: FastAPI, settings): + # Authentication endpoints + app.include_router( + auth.router, + prefix=f"{settings.API_PREFIX}/auth", + tags=["Authentication"], + responses={404: {"description": "Not found"}}, + ) + + # Cache endpoints + app.include_router( + cache.router, + prefix=f"{settings.API_PREFIX}/cache", + tags=["Cache"], + ) + 
    app.include_router(
        graph_router,
        prefix=f"{settings.API_PREFIX}",
        tags=["Graph"],
    )


app = create_app()

diff --git a/app/services/cache_service.py b/app/services/cache_service.py
new file mode 100644
index 0000000..5895ddc
--- /dev/null
+++ b/app/services/cache_service.py
@@ -0,0 +1,66 @@
import aioredis
import asyncio
from typing import Optional
from app.core.config import settings
from structlog import get_logger

logger = get_logger()

class CacheService:
    """Redis-based cache service for AutoAudit API."""

    def __init__(self):
        # Initialize Redis connection pool
        self.redis = aioredis.from_url(
            settings.REDIS_URL,
            encoding="utf-8",
            decode_responses=True,
        )
        # Stats
        self.hits = 0
        self.misses = 0

    async def get(self, key: str) -> Optional[str]:
        """Retrieve a value from cache."""
        # Use namespaced cache keys to avoid collisions across environments/projects
        value = await self.redis.get(f"{settings.CACHE_KEY_PREFIX}:{key}")
        if value is None:
            self.misses += 1
            logger.debug("Cache miss", key=key)
        else:
            self.hits += 1
            logger.debug("Cache hit", key=key)
        return value

    async def set(self, key: str, value: str, ttl: Optional[int] = None) -> None:
        """Set a value in cache with TTL."""
        expire = ttl or settings.CACHE_TTL_DEFAULT
        await self.redis.set(
            f"{settings.CACHE_KEY_PREFIX}:{key}",
            value,
            ex=expire,
        )
        logger.debug("Cache set", key=key, ttl=expire)

    async def delete(self, key: str) -> None:
        """Delete a key from cache."""
        await self.redis.delete(f"{settings.CACHE_KEY_PREFIX}:{key}")
        logger.debug("Cache delete", key=key)

    async def clear(self) -> None:
        """Clear the entire cache (use with caution)."""
        await self.redis.flushdb()
        logger.warning("Cache cleared")

    def stats(self) -> dict:
        """Return cache hit/miss statistics."""
        total = self.hits + self.misses
        hit_rate = (self.hits / total * 100) if total > 0 else 0.0
        return {
            "hits": self.hits,
            "misses": self.misses,
            "hit_rate": f"{hit_rate:.2f}%",
        }

# Instantiate a singleton service
cache_service = CacheService()
diff --git a/app/services/graph_service.py b/app/services/graph_service.py
new file mode 100644
index 0000000..fc0994a
--- /dev/null
+++ b/app/services/graph_service.py
@@ -0,0 +1,70 @@

import json
import httpx
from typing import Dict, Any, Optional
from app.core.config import settings
from app.services.cache_service import cache_service
from app.utils.logger import logger

class GraphService:
    def __init__(self):
        self.base = settings.GRAPH_API_BASE_URL.rstrip("/")

    def _users_cache_key(self, token: str, top: Optional[int]) -> str:
        suffix = f":top={top}" if top else ""
        # Use only the first 16 chars of token to avoid storing full secrets
        return f"graph_users:{token[:16]}{suffix}"

    def _me_cache_key(self, token: str) -> str:

        return f"graph_me:{token[:16]}"

    async def list_users(self, token: str, top: Optional[int] = None) -> Dict[str, Any]:
        key = self._users_cache_key(token, top)
        cached = await cache_service.get(key)
        if cached:
            try:
                data = json.loads(cached)
                logger.info("Users fetched from cache", count=len(data.get("value", [])))
                return {"source": "cache", "data": data}
            except Exception:
                logger.warning("Users cache decode failed; calling Graph", key=key)

        headers = {"Authorization": f"Bearer {token}"}
        params = {"$top": str(top)} if top else None
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.get(f"{self.base}/users", headers=headers, params=params)

        if resp.status_code == 200:
            data = resp.json()
            await cache_service.set(key, json.dumps(data))
            logger.info("Users fetched from Graph", count=len(data.get("value", [])))
            return {"source": "graph", "data": data}

        logger.warning("Users call failed", status_code=resp.status_code, text=resp.text)
        return {"source": "graph", "error": f"{resp.status_code}: {resp.text}"}

    async def get_me(self, token: str) -> Dict[str, Any]:
        key = self._me_cache_key(token)

        cached = await cache_service.get(key)
        if cached:
            try:
                data = json.loads(cached)
                logger.info("Me fetched from cache", user_id=data.get("id"))
                return {"source": "cache", "data": data}
            except Exception:
                logger.warning("Me cache decode failed; calling Graph", key=key)

        headers = {"Authorization": f"Bearer {token}"}
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.get(f"{self.base}/me", headers=headers)

        if resp.status_code == 200:
            data = resp.json()
            await cache_service.set(key, json.dumps(data))
            logger.info("Me fetched from Graph", user_id=data.get("id"))
            return {"source": "graph", "data": data}

        logger.warning("Me call failed", status_code=resp.status_code, text=resp.text)
        return {"source": "graph", "error": f"{resp.status_code}: {resp.text}"}
diff --git a/pyproject.toml b/pyproject.toml
index 396374a..d0aecca 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,6 +12,8 @@ dependencies = [
     "uvicorn>=0.35.0",
     "python-dotenv>=1.1.1",
     "structlog>=25.4.0",
+    "redis>=6.4.0",
+    "aioredis>=2.0.1",
 ]
 
 [project.optional-dependencies]
@@ -22,6 +24,8 @@ dev = [
 
 [tool.uv]
 dev-dependencies = [
+    "fakeredis>=2.31.0",
     "pytest>=7.0.0",
     "pytest-asyncio>=0.21.0",
+    "pytest-redis>=3.1.3",
 ]
diff --git a/uv.lock b/uv.lock
index a2949a1..5cea08e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,7 +1,20 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.10"
 
+[[package]]
+name = "aioredis"
+version = "2.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "async-timeout" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2e/cf/9eb144a0b05809ffc5d29045c4b51039000ea275bc1268d0351c9e7dfc06/aioredis-2.0.1.tar.gz", hash = "sha256:eaa51aaf993f2d71f54b70527c440437ba65340588afeb786cd87c55c89cd98e", size = 111047, upload-time = "2021-12-27T20:28:17.557Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9b/a9/0da089c3ae7a31cbcd2dcf0214f6f571e1295d292b6139e2bac68ec081d0/aioredis-2.0.1-py3-none-any.whl", hash = "sha256:9ac0d0b3b485d293b8ca1987e6de8658d7dafcca1cddfcd1d506cae8cdebfdd6", size = 71243, upload-time = "2021-12-27T20:28:16.36Z" },
+]
+
 [[package]]
 name = "annotated-types"
 version = "0.7.0"
@@ -26,16 +39,27 @@
 wheels = [
     { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" },
 ]
 
+[[package]]
+name = "async-timeout"
+version = "5.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
+]
+
 [[package]]
 name = "autoaudit-api"
-version = "1.0.0"
+version = "0.1.0"
 source = { virtual = "." }
 dependencies = [
+    { name = "aioredis" },
     { name = "fastapi" },
     { name = "httpx" },
     { name = "pydantic" },
     { name = "pydantic-settings" },
     { name = "python-dotenv" },
+    { name = "redis" },
     { name = "structlog" },
     { name = "uvicorn" },
 ]
@@ -48,12 +72,15 @@
 
 [package.dev-dependencies]
 dev = [
+    { name = "fakeredis" },
     { name = "pytest" },
     { name = "pytest-asyncio" },
+    { name = "pytest-redis" },
 ]
 
 [package.metadata]
 requires-dist = [
+    { name = "aioredis", specifier = ">=2.0.1" },
     { name = "fastapi", specifier = ">=0.116.1" },
     { name = "httpx", specifier = ">=0.28.1" },
     { name = "pydantic", specifier = ">=2.11.7" },
@@ -61,6 +88,7 @@
     { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" },
     { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" },
     { name = "python-dotenv", specifier = ">=1.1.1" },
+    { name = "redis", specifier = ">=6.4.0" },
     { name = "structlog", specifier = ">=25.4.0" },
     { name = "uvicorn", specifier = ">=0.35.0" },
 ]
@@ -68,8 +96,10 @@ provides-extras = ["dev"]
 
 [package.metadata.requires-dev]
 dev = [
+    { name = "fakeredis", specifier = ">=2.31.0" },
     { name = "pytest", specifier = ">=7.0.0" },
     { name = "pytest-asyncio", specifier = ">=0.21.0" },
+    { name = "pytest-redis", specifier = ">=3.1.3" },
 ]
 
 [[package]]
@@ -123,6 +153,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" },
 ]
 
+[[package]]
+name = "fakeredis"
+version = "2.31.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "redis" },
+    { name = "sortedcontainers" },
+    { name = "typing-extensions", marker = "python_full_version < '3.11'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0b/10/c829c3475a26005ebf177057fdf54e2a29025ffc2232d02fb1ae8ac1de68/fakeredis-2.31.0.tar.gz", hash = "sha256:2942a7e7900fd9076ff9e608b9190a87315ac5a325a9ab8bfe288a2d985ecd23", size = 170163, upload-time = "2025-08-11T14:58:20.64Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/52/ef/25639beb5d93188b4b6502f601d8f97db77e362774f0183a48e995353c58/fakeredis-2.31.0-py3-none-any.whl", hash = "sha256:2584e57d93df4eb8e87931b29279902826d3caf77d06911106df4e066c2ad198", size = 117666, upload-time = "2025-08-11T14:58:19.03Z" },
+]
+
 [[package]]
 name = "fastapi"
 version = "0.116.1"
@@ -192,6 +236,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
 ]
 
+[[package]]
+name = "mirakuru"
+version = "2.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "psutil", marker = "sys_platform != 'cygwin'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f9/57/bfa1e5b904b18f669e03b7c6981bb92fb473b7da9c3b082a875e25bfaa8c/mirakuru-2.6.1.tar.gz", hash = "sha256:95d4f5a5ad406a625e9ca418f20f8e09386a35dad1ea30fd9073e0ae93f712c7", size = 26889, upload-time = "2025-07-02T07:18:41.234Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b8/ce/139df7074328119869a1041ce91c082d78287541cf867f9c4c85097c5d8b/mirakuru-2.6.1-py3-none-any.whl", hash = "sha256:4be0bfd270744454fa0c0466b8127b66bd55f4decaf05bbee9b071f2acbd9473", size = 26202, upload-time = "2025-07-02T07:18:39.951Z" },
+]
+
 [[package]]
 name = "packaging"
 version = "25.0"
@@ -210,6 +266,30 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
 ]
 
+[[package]]
+name = "port-for"
+version = "0.7.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/84/ad5114c85217426d7a5170a74a6f9d6b724df117c2f3b75e41fc9d6c6811/port_for-0.7.4.tar.gz", hash = "sha256:fc7713e7b22f89442f335ce12536653656e8f35146739eccaeff43d28436028d", size = 25077, upload-time = "2024-10-09T12:28:38.875Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9c/a2/579dcefbb0285b31f8d65b537f8a9932ed51319e0a3694e01b5bbc271f92/port_for-0.7.4-py3-none-any.whl", hash = "sha256:08404aa072651a53dcefe8d7a598ee8a1dca320d9ac44ac464da16ccf2a02c4a", size = 21369, upload-time = "2024-10-09T12:28:37.853Z" },
+]
+
+[[package]]
+name = "psutil"
+version = "7.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" },
+    { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" },
+    { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" },
+    { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" },
+    { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" },
+]
+
 [[package]]
 name = "pydantic"
 version = "2.11.7"
@@ -366,6 +446,21 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" },
 ]
 
+[[package]]
+name = "pytest-redis"
+version = "3.1.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "mirakuru" },
+    { name = "port-for" },
+    { name = "pytest" },
+    { name = "redis" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4d/d4/4d37bbe92ce7e991175115a30335dd591dfc9a086b10b5ed58133b286a17/pytest_redis-3.1.3.tar.gz", hash = "sha256:8bb76be4a749f1907c8b4f04213df40b679949cc2ffe39657e222ccb912aecd9", size = 38202, upload-time = "2024-11-27T08:42:22.322Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/90/5f/d9e617368aeee75609e43c66ff22e9d216c761f5b4290d56927d493ec618/pytest_redis-3.1.3-py3-none-any.whl", hash = "sha256:7fd6eb54ed0878590b857e1011b031c38aa3e230a53771739e845d3fc6b05d79", size = 32856, upload-time = "2024-11-27T08:42:19.837Z" },
+]
+
 [[package]]
 name = "python-dotenv"
 version = "1.1.1"
@@ -375,6 +470,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
 ]
 
+[[package]]
+name = "redis"
+version = "6.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "async-timeout", marker = "python_full_version < '3.11.3'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0d/d6/e8b92798a5bd67d659d51a18170e91c16ac3b59738d91894651ee255ed49/redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010", size = 4647399, upload-time = "2025-08-07T08:10:11.441Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" },
+]
+
 [[package]]
 name = "sniffio"
 version = "1.3.1"
@@ -384,6 +491,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + [[package]] name = "starlette" version = "0.47.2" From df9bf486b5c0327c8de47a2d41b2c80a9f76e4c6 Mon Sep 17 00:00:00 2001 From: Pasindu P <99115820+dec1belPP@users.noreply.github.com> Date: Sat, 20 Sep 2025 09:21:45 +1000 Subject: [PATCH 2/5] Discard changes to app/services/graph_service.py --- app/services/graph_service.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/services/graph_service.py b/app/services/graph_service.py index 7c6ee4b..dda8780 100644 --- a/app/services/graph_service.py +++ b/app/services/graph_service.py @@ -67,4 +67,3 @@ async def call_graph_api(path: str): except ValueError: logger.warning("Response is not valid JSON") return {"raw_response": resp.text} - From 4627232d5a4dbf4dc253bb34d0475545618faa60 Mon Sep 17 00:00:00 2001 From: Pasindu P <99115820+dec1belPP@users.noreply.github.com> Date: Sat, 20 Sep 2025 09:21:54 +1000 Subject: [PATCH 3/5] Discard changes to app/api/v1/graph.py --- app/api/v1/graph.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/api/v1/graph.py b/app/api/v1/graph.py index 358884b..7cc4448 100644 --- a/app/api/v1/graph.py +++ b/app/api/v1/graph.py @@ -136,4 +136,3 @@ async def get_group_members(group_id: str): """Fetch members of a specific group.""" data = await call_graph_api(f"groups/{group_id}/members") return [UserResponse(**user) for user in data.get("value", [])] - From 60ca42597933b8afbf776efedcd4337447eac1da Mon Sep 17 00:00:00 2001 From: Pasindu P <99115820+dec1belPP@users.noreply.github.com> Date: Sat, 20 Sep 2025 09:22:16 +1000 Subject: [PATCH 4/5] Discard changes to .env.example --- .env.example | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index 8fa1e64..c04c89c 100644 --- a/.env.example +++ b/.env.example @@ -7,7 +7,7 @@ VERSION = "0.1.0" # Azure AD Configuration AZURE_CLIENT_ID = "your_client_id" -AZURE_CLIENT_SECRET = "your_client_secrect" +AZURE_CLIENT_SECRET = "your_client_secret" AZURE_TENANT_ID = "your_tenant_id" # Logging @@ -19,6 +19,4 @@ ALLOWED_HOSTS = "["*"]" API_PREFIX = "/api/v1" # Microsoft Graph API -GRAPH_API_BASE_URL = "https://graph.microsoft.com/v1.0" - - +GRAPH_API_BASE_URL = "https://graph.microsoft.com/v1.0" \ No newline at end of file From 33b191e6d806457e5638f5d28b6d6bb18bd0b366 Mon Sep 17 00:00:00 2001 From: Jes360 Date: Sat, 20 Sep 2025 21:34:31 +1000 Subject: [PATCH 5/5] addressed reviewer feedback --- app/api/middleware/cache_middleware.py | 63 ++++++++++++++++++-------- app/api/v1/cache.py | 2 +- app/main.py | 12 ++++- app/services/cache_service.py | 36 ++++++++++++--- pyproject.toml | 1 - 5 files changed, 87 insertions(+), 27 deletions(-) diff --git a/app/api/middleware/cache_middleware.py 
b/app/api/middleware/cache_middleware.py index 8e2e3f1..cff05c0 100644 --- a/app/api/middleware/cache_middleware.py +++ b/app/api/middleware/cache_middleware.py @@ -3,29 +3,45 @@ from app.services.cache_service import cache_service from app.core.config import settings import json +from structlog import get_logger + +logger = get_logger() + class CacheMiddleware: - """Middleware to cache GET responses.""" + """Middleware to cache GET responses, including headers.""" def __init__(self, app: ASGIApp): self.app = app async def __call__(self, scope: Scope, receive: Receive, send: Send): - # Only cache HTTP GET requests - if scope["type"] == "http" and scope["method"] == "GET": + # Only cache HTTP GET requests and if caching is enabled + if scope["type"] == "http" and scope["method"] == "GET" and settings.CACHE_ENABLED: request = Request(scope, receive) key = request.url.path + "?" + (request.url.query or "") - if settings.CACHE_ENABLED: - # Try to fetch from cache + try: cached = await cache_service.get(key) - if cached: - # Return cached response - headers = {"content-type": "application/json"} - response = Response(content=cached, status_code=200, headers=headers) - await response(scope, receive, send) - return - - # Capture the response + except Exception as e: + logger.error("Redis failure on cache GET", error=str(e)) + cached = None + + if cached: + # Try to restore full response (body + headers + status) + try: + cached_obj = json.loads(cached) + response = Response( + content=cached_obj.get("body", ""), + status_code=cached_obj.get("status_code", 200), + headers=cached_obj.get("headers", {}), + ) + logger.debug("Cache hit", key=key) + except Exception as e: + logger.error("Failed to parse cached response", error=str(e)) + response = Response(content=cached, status_code=200) + await response(scope, receive, send) + return + + # Capture and cache the response if not in cache responder = _ResponseCatcher(self.app, key) await responder(scope, receive, send) return @@ -35,7 +51,7 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send): class _ResponseCatcher: - """Helper to capture response body and cache it.""" + """Helper to capture response body, headers, and cache them.""" def __init__(self, app: ASGIApp, key: str): self.app = app @@ -48,13 +64,24 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send): async def send_wrapper(message): if message["type"] == "http.response.start": self.status_code = message["status"] - self.headers = dict(message.get("headers", [])) + raw_headers = message.get("headers", []) + # Decode headers into dict[str, str] + self.headers = {k.decode(): v.decode() for k, v in raw_headers} elif message["type"] == "http.response.body": self.body += message.get("body", b"") await send(message) await self.app(scope, receive, send_wrapper) - # Cache successful GET responses - if self.status_code == 200: - await cache_service.set(self.key, self.body.decode(), settings.CACHE_TTL_DEFAULT) + # Cache successful GET responses, only if enabled and Redis is available + if self.status_code == 200 and settings.CACHE_ENABLED: + try: + payload = { + "body": self.body.decode(), + "headers": self.headers, + "status_code": self.status_code, + } + await cache_service.set(self.key, json.dumps(payload), settings.CACHE_TTL_DEFAULT) + logger.debug("Response cached", key=self.key, ttl=settings.CACHE_TTL_DEFAULT) + except Exception as e: + logger.error("Redis failure on cache SET", error=str(e)) diff --git a/app/api/v1/cache.py b/app/api/v1/cache.py index 
da83a69..60574c3 100644 --- a/app/api/v1/cache.py +++ b/app/api/v1/cache.py @@ -3,7 +3,6 @@ from app.core.config import settings router = APIRouter() -PREFIX = settings.API_PREFIX + "/cache" @router.get("/status") async def cache_status(): @@ -35,3 +34,4 @@ async def cache_stats(): """ stats = cache_service.stats() return stats + return stats diff --git a/app/main.py b/app/main.py index 09fae4e..32e7a22 100644 --- a/app/main.py +++ b/app/main.py @@ -9,6 +9,7 @@ from app.api.v1.graph import router as graph_router from app.api.middleware.cache_middleware import CacheMiddleware from app.utils.logger import logger +from app.services.cache_service import cache_service def create_app() -> FastAPI: @@ -23,7 +24,16 @@ def create_app() -> FastAPI: configure_middleware(app, settings) configure_routing(app, settings) configure_exception_handlers(app) - + + # Redis connection management + @app.on_event("startup") + async def startup_event(): + await cache_service.init() + + @app.on_event("shutdown") + async def shutdown_event(): + await cache_service.close() + await cache_service.wait_closed() return app diff --git a/app/services/cache_service.py b/app/services/cache_service.py index 5895ddc..cb5b086 100644 --- a/app/services/cache_service.py +++ b/app/services/cache_service.py @@ -1,4 +1,4 @@ -import aioredis +import redis.asyncio as redis import asyncio from typing import Optional from app.core.config import settings @@ -10,18 +10,33 @@ class CacheService: """Redis-based cache service for AutoAudit API.""" def __init__(self): + self.redis = None + # Stats + self.hits = 0 + self.misses = 0 + + async def init(self): # Initialize Redis connection pool - self.redis = aioredis.from_url( + self.redis = await redis.from_url( settings.REDIS_URL, encoding="utf-8", decode_responses=True, ) - # Stats - self.hits = 0 - self.misses = 0 + + async def close(self): + if self.redis: + await self.redis.close() + + async def wait_closed(self): + if self.redis: + await self.redis.connection_pool.disconnect() async def get(self, key: str) -> Optional[str]: """Retrieve a value from cache.""" + if not self.redis: + logger.warning("Redis not initialized") + self.misses += 1 + return None # Use namespaced cache keys to avoid collisions across environments/projects value = await self.redis.get(f"{settings.CACHE_KEY_PREFIX}:{key}") if value is None: @@ -34,6 +49,9 @@ async def get(self, key: str) -> Optional[str]: async def set(self, key: str, value: str, ttl: Optional[int] = None) -> None: """Set a value in cache with TTL.""" + if not self.redis: + logger.warning("Redis not initialized") + return expire = ttl or settings.CACHE_TTL_DEFAULT await self.redis.set( f"{settings.CACHE_KEY_PREFIX}:{key}", @@ -44,11 +62,17 @@ async def set(self, key: str, value: str, ttl: Optional[int] = None) -> None: async def delete(self, key: str) -> None: """Delete a key from cache.""" + if not self.redis: + logger.warning("Redis not initialized") + return await self.redis.delete(f"{settings.CACHE_KEY_PREFIX}:{key}") logger.debug("Cache delete", key=key) async def clear(self) -> None: """Clear the entire cache (use with caution).""" + if not self.redis: + logger.warning("Redis not initialized") + return await self.redis.flushdb() logger.warning("Cache cleared") @@ -62,5 +86,5 @@ def stats(self) -> dict: "hit_rate": f"{hit_rate:.2f}%", } -# Instantiate a singleton service +# Singleton pattern with async init cache_service = CacheService() diff --git a/pyproject.toml b/pyproject.toml index d0aecca..219675c 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -13,7 +13,6 @@ dependencies = [ "python-dotenv>=1.1.1", "structlog>=25.4.0", "redis>=6.4.0", - "aioredis>=2.0.1", ] [project.optional-dependencies]
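
A quick smoke test for the series above — not part of any patch, just a sketch. It assumes a local dev server on http://localhost:8000 with the default API_PREFIX of "/api/v1" (host, port, and the printed payloads are assumptions, not anything the patches pin down), and it uses httpx, which is already a runtime dependency, to exercise the /cache endpoints added in PATCH 1:

import asyncio
import httpx

BASE = "http://localhost:8000/api/v1"  # assumed local dev server; adjust host/port as needed

async def main():
    async with httpx.AsyncClient() as client:
        # Confirm caching is enabled and Redis is reachable
        r = await client.get(f"{BASE}/cache/status")
        print(r.json())  # e.g. {'enabled': True, 'healthy': True}

        # Issue the same GET twice: the first populates the cache via
        # CacheMiddleware, the second should be served from Redis
        await client.get(f"{BASE}/cache/stats")
        r = await client.get(f"{BASE}/cache/stats")
        print(r.json())  # hits/misses/hit_rate as reported by CacheService.stats()

        # Flush everything the middleware has cached
        r = await client.post(f"{BASE}/cache/clear")
        print(r.json())  # e.g. {'status': 'cleared'}

asyncio.run(main())

Because CacheMiddleware caches every 200 GET response, the second /cache/stats call should come back from Redis; clearing and re-requesting is an easy way to watch the hit/miss counters move.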