diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml
index 09809c7..d502246 100644
--- a/.github/workflows/workflow.yml
+++ b/.github/workflows/workflow.yml
@@ -37,6 +37,14 @@ jobs:
       - name: Checkout Code
         uses: actions/checkout@v4
 
+      - name: Transfer deploy file to EC2
+        uses: appleboy/scp-action@master
+        with:
+          host: ${{ secrets.REMOTE_HOST }}
+          username: ${{ secrets.REMOTE_USER }}
+          key: ${{ secrets.SSH_PRIVATE_KEY }}
+          source: ./deployment/
+          target: /home/ubuntu/dearbelly/deployment
 
   deploy:
     name: Deploy
@@ -52,8 +60,8 @@ jobs:
           port: 22
           script: |
             # 1. cd
-            mkdir -p ~/dearbelly
-            cd ~/dearbelly
+            mkdir -p ~/dearbelly/deployment
+            cd ~/dearbelly/deployment
 
             # 2. .env file
             echo "${{ secrets.ENV }}" > .env
@@ -69,5 +77,5 @@ jobs:
             docker pull ${{ secrets.ECR_URI }}/dearbelly-cv:latest
 
             # 6. docker start
-            # TODO: write the start script
-            docker run -d --name app-blue -p 8000:8000 ${{ secrets.ECR_URI }}/dearbelly-cv:latest
\ No newline at end of file
+            sudo chmod +x deploy.sh
+            source deploy.sh
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 3b1d388..debaf2a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -40,7 +40,7 @@ MANIFEST
 pip-log.txt
 pip-delete-this-directory.txt
 
-# Unit test / coverage reports
+# Unit tests / coverage reports
 htmlcov/
 .tox/
 .nox/
diff --git a/app/api/endpoints/predictions.py b/app/api/endpoints/predictions.py
index bfa2e55..6015b35 100644
--- a/app/api/endpoints/predictions.py
+++ b/app/api/endpoints/predictions.py
@@ -4,30 +4,43 @@
 import redis.asyncio as redis
 import uuid
 from datetime import datetime
+import json
 
 router = APIRouter()
 
 def get_redis_client(request: Request) -> redis.Redis:
     return request.app.state.redis_client
 
+"""
+Temporary API for testing
+"""
 @router.post("/predict", status_code=202)
 async def create_prediction_job(job_request: JobRequest, redis_client: redis.Redis = Depends(get_redis_client)):
-    correlation_id = str(uuid.uuid4())
+    correlationId = str(uuid.uuid4())
 
     job = ImageJob(
         correlationId=correlationId,
-        presignedUrl=job_request.presigned_url,
+        presignedUrl=job_request.presignedUrl,
         replyQueue=settings.STREAM_RESULT,
-        callbackUrl=None,
-        contentType="image/jpeg",
+        contentType=job_request.contentType,
         createdAt=datetime.utcnow().isoformat(),
         ttlSec=3600,
     )
 
-    await redis_client.xadd(
+    # Coerce fields into the types the stream consumer expects
+    correlationId = job.correlationId
+    payload = json.dumps(job.model_dump())
+    print("Publishing image job...")
+    entry_id = await redis_client.xadd(
         settings.STREAM_JOB,
-        {"json": job.model_dump_json()},
+        {
+            "type": "image_jobs",
+            "payload": payload,
+            "correlationId": correlationId,
+        },
         maxlen=10_000,
         approximate=True,
     )
-    return {"job_id": correlation_id}
\ No newline at end of file
+    print(f"Image job published (entry_id={entry_id})")
+
+    return {"job_id": correlationId}
\ No newline at end of file
diff --git a/app/core/config.py b/app/core/config.py
index 5e336f3..1bf7e00 100644
--- a/app/core/config.py
+++ b/app/core/config.py
@@ -5,14 +5,11 @@
 load_dotenv()
 
 redis_url = os.getenv("REDIS_SERVER_URL")
-bucket_name = os.getenv("BUCKET_NAME")
-s3_region = os.getenv("S3_REGION")
-
+openai_key = os.getenv("OPENAI_API_KEY")
 
 class Settings(BaseSettings):
     REDIS_URL: str = redis_url
-    S3_REGION: str = bucket_name
-    BUCKET_NAME: str = s3_region
+    OPENAI_KEY: str = openai_key
 
     STREAM_JOB: str = "image.jobs"        # Jobs published by Spring Boot (FastAPI listens)
     STREAM_RESULT: str = "image.results"  # Results published by FastAPI (Spring Boot listens)
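Reviewer note: a minimal smoke call for the reworked `/predict` publisher, assuming the app runs locally on port 8000 under the same `/api/v1` prefix the health-check test uses; the presigned URL is a placeholder.

```python
# Hypothetical smoke test for the new /predict contract (presignedUrl + contentType).
import requests

resp = requests.post(
    "http://localhost:8000/api/v1/predict",
    json={
        "presignedUrl": "https://example.com/fake-presigned-url",  # placeholder
        "contentType": "image/jpeg",
    },
    timeout=5,
)
assert resp.status_code == 202
print(resp.json())  # {"job_id": "<uuid>"}
```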
diff --git a/app/core/lifespan.py b/app/core/lifespan.py
index 9bdc15f..2db715d 100644
--- a/app/core/lifespan.py
+++ b/app/core/lifespan.py
@@ -1,26 +1,24 @@
 import asyncio
 from contextlib import asynccontextmanager
-import redis.asyncio as redis
+
+import anyio.to_thread
 from fastapi import FastAPI
 
+from app.worker.redis_client import redis_client
 from app.core.config import settings
 from app.worker.worker import JobWorker
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    redis_client = redis.from_url(settings.REDIS_URL, decode_responses=True)
+    # Widen the default AnyIO thread pool so blocking downloads/predictions
+    # dispatched via asyncio.to_thread do not starve each other
+    limiter = anyio.to_thread.current_default_thread_limiter()
+    limiter.total_tokens = 100
 
-    try:
-        await redis_client.xgroup_create(
-            name=settings.STREAM_JOB,
-            groupname=settings.GROUP_NAME,
-            id="$",
-            mkstream=True
-        )
-    except redis.ResponseError as e:
-        if "BUSYGROUP" not in str(e):
-            raise
+    await redis_client.xgroup_create(
+        stream_name=settings.STREAM_JOB,
+        group_name=settings.GROUP_NAME,
+    )
 
     worker = JobWorker(redis_client)
     worker_task = asyncio.create_task(worker.run())
diff --git a/app/schemas/job.py b/app/schemas/job.py
index 4313bc5..f75ed14 100644
--- a/app/schemas/job.py
+++ b/app/schemas/job.py
@@ -2,20 +2,25 @@
 from pydantic import BaseModel, Field
 
 class JobRequest(BaseModel):
-    presigned_url: str = Field(..., description="Presigned URL of the image to download")
+    presignedUrl: str = Field(..., description="Presigned URL of the image to download")
+    contentType: str = Field("image/jpeg", description="MIME type of the uploaded image")
 
+"""
+Message that FastAPI publishes to the image.results stream
+"""
 class JobResult(BaseModel):
-    pill_name: str
-    correlation_id: str
-    label: str
-    confidence: float
-    finished_at: str
+    correlationId: str
+    pillName: str
+    isSafe: int
+    description: str
+    finishedAt: str
 
+"""
+Job received from Spring (subscribed via image.jobs)
+"""
 class ImageJob(BaseModel):
-    correlation_id: str = Field(alias="correlationId")
-    presigned_url: str = Field(alias="presignedUrl")
-    reply_queue: str = Field(alias="replyQueue")
-    callback_url: str | None = Field(alias="callbackUrl")
-    content_type: str = Field(alias="contentType")
-    created_at: str = Field(alias="createdAt")
-    ttl_sec: int = Field(alias="ttlSec")
+    correlationId: str
+    presignedUrl: str
+    replyQueue: str
+    contentType: str
+    createdAt: str
+    ttlSec: int
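Reviewer note: a self-contained round-trip of the `{type, correlationId, payload}` envelope both sides now share, using the schemas from this PR; all values are made up.

```python
# Illustrative envelope for image.jobs / image.results entries.
import json
from app.schemas.job import ImageJob

fields = {
    "type": "image_jobs",
    "correlationId": "123e4567-e89b-12d3-a456-426614174000",
    "payload": json.dumps({
        "correlationId": "123e4567-e89b-12d3-a456-426614174000",
        "presignedUrl": "https://example.com/img.jpg",
        "replyQueue": "image.results",
        "contentType": "image/jpeg",
        "createdAt": "2025-01-01T00:00:00",
        "ttlSec": 3600,
    }),
}

# The worker parses the payload back into the Pydantic model
job = ImageJob.model_validate_json(fields["payload"])
assert job.correlationId == fields["correlationId"]
```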
diff --git a/app/services/openai_service.py b/app/services/openai_service.py
new file mode 100644
index 0000000..7455f19
--- /dev/null
+++ b/app/services/openai_service.py
@@ -0,0 +1,66 @@
+from app.core.config import settings
+from openai import OpenAI
+import json
+
+client = OpenAI(api_key=settings.OPENAI_KEY)
+
+class PregnancySafetyChecker:
+    def __init__(self, client: OpenAI):
+        self.client = client
+
+    """
+    - isSafe: 1 if safe, 0 if not
+    - description: explanation of whether the drug can be taken
+    """
+    def ask_chatgpt_about_pregnancy_safety(self, pill_name: str) -> tuple[str, int]:
+        # Prompt is intentionally kept in Korean: the app serves Korean users
+        prompt = f"""
+        약 이름: {pill_name}
+        질문: 이 약은 임산부가 복용해도 안전한가요? 복용 가능 여부와 주의사항을 알려주세요.
+        description 안에는 문장마다 \\n 을 적용하세요.
+        결과를 JSON 형식으로 정확히 반환하세요. 설명이나 다른 텍스트를 절대 덧붙이지 마세요.
+        스키마:
+        {{
+          "description": "복용 가능 여부 및 주의사항에 대한 설명",
+          "isSafe": 1 또는 0
+        }}
+        """
+
+        response = self.client.chat.completions.create(
+            model="gpt-4o-mini",
+            messages=[{"role": "user", "content": prompt}],
+            temperature=0,
+            max_tokens=600,
+            response_format={"type": "json_object"}
+        )
+        print("GPT request succeeded...")
+        raw = response.choices[0].message.content.strip()
+
+        try:
+            data = json.loads(raw)
+        except json.JSONDecodeError:
+            # Fall back to the outermost JSON object if extra text slipped in
+            start = raw.find("{")
+            end = raw.rfind("}")
+            if start != -1 and end != -1 and start < end:
+                data = json.loads(raw[start:end+1])
+            else:
+                # For debugging
+                preview = raw[:200].replace("\n", "\\n")
+                raise ValueError(f"Response is not valid JSON. preview='{preview}'")
+
+        description = data.get("description")
+        isSafe = data.get("isSafe")
+
+        if isinstance(isSafe, bool):
+            isSafe = 1 if isSafe else 0
+        elif isinstance(isSafe, str):
+            isSafe = 1 if isSafe.strip() in {"1", "true", "True"} else 0
+        elif not isinstance(isSafe, int):
+            isSafe = 0
+
+        if not isinstance(description, str):
+            description = ""  # safety net
+
+        return description, int(isSafe)
+
+
+checker = PregnancySafetyChecker(client)
\ No newline at end of file
diff --git a/app/services/predictor_service.py b/app/services/predictor_service.py
index c06efcd..175003c 100644
--- a/app/services/predictor_service.py
+++ b/app/services/predictor_service.py
@@ -6,6 +6,7 @@
 from PIL import Image
 import json
 from pathlib import Path
+from io import BytesIO
 
 class LightCNN(nn.Module):
     def __init__(self, num_classes):
@@ -53,8 +54,8 @@
         model.eval()
         return model
 
-    def predict(self, image_path: Path) -> tuple[str, str, float]:
-        image = Image.open(image_path).convert('RGB')
+    def predict(self, stream_file: BytesIO) -> tuple[str, str, float]:
+        image = Image.open(stream_file).convert('RGB')
         input_tensor = self.transform(image).unsqueeze(0).to(self.device)
 
         with torch.no_grad():
diff --git a/app/services/s3_service.py b/app/services/s3_service.py
index b85d4fc..1e8debb 100644
--- a/app/services/s3_service.py
+++ b/app/services/s3_service.py
@@ -1,27 +1,15 @@
-import boto3
-from botocore.config import Config as BotoConfig
-from pathlib import Path
 import requests
-
-from app.core.config import settings
+from io import BytesIO
 
 class S3Service:
     def __init__(self):
-        self.client = boto3.client(
-            "s3",
-            region_name=settings.S3_REGION,
-            config=BotoConfig(
-                retries={"max_attempts": 5, "mode": "standard"},
-                read_timeout=30,
-                connect_timeout=5,
-            ),
-        )
+        pass
 
-    def download_file_from_presigned_url(self, presigned_url: str, destination: Path):
+    def download_file_from_presigned_url(self, presigned_url: str) -> BytesIO:
         response = requests.get(presigned_url)
         response.raise_for_status()
-        with open(destination, "wb") as f:
-            f.write(response.content)
+        # Wrap the response content in BytesIO and return it
+        return BytesIO(response.content)
 
 s3_service = S3Service()
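Reviewer note: a minimal sketch of the new in-memory flow these three services now form (download into `BytesIO`, predict, then check safety), with no temp files; it assumes a reachable presigned URL (placeholder below), the model weights on disk, and `OPENAI_API_KEY` set.

```python
# End-to-end sketch of the in-memory pipeline; URL is a placeholder.
from app.services.s3_service import s3_service
from app.services.predictor_service import predictor_service
from app.services.openai_service import checker

stream = s3_service.download_file_from_presigned_url("https://example.com/presigned")
stream.seek(0)  # rewind before PIL reads the buffer
pill_name, label, confidence = predictor_service.predict(stream)
description, is_safe = checker.ask_chatgpt_about_pregnancy_safety(pill_name)
print(pill_name, label, confidence, is_safe)
```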
diff --git a/app/worker/redis_client.py b/app/worker/redis_client.py
index 8b7339b..1a79bcb 100644
--- a/app/worker/redis_client.py
+++ b/app/worker/redis_client.py
@@ -1,10 +1,172 @@
 import redis
+import json
+
+from redis.asyncio import Redis as AsyncRedis
 from pydantic import BaseModel, Field
-from typing import Any, Dict
+from typing import Any, Dict, List, Optional, Union
 
 from app.core.config import settings
 
-redis_client = redis.asyncio.from_url(settings.REDIS_URL, decode_responses=True)
-
+# Redis Stream definition
 class PublishRequest(BaseModel):
-    stream: str = Field(default=settings.JOB_STREAM, description="Redis Stream Job name")
-    payload: Dict[str, Any]
\ No newline at end of file
+    stream: str = Field(default=settings.STREAM_JOB, description="Redis Stream Job name")
+    payload: Dict[str, Any]
+
+class RedisStreamClient:
+    def __init__(self):
+        self.redis_client = AsyncRedis.from_url(
+            url=settings.REDIS_URL,
+            decode_responses=True,
+        )
+
+    @classmethod
+    def init(cls):
+        return cls()
+
+    @staticmethod
+    def _to_scalar(v: Any) -> Union[str, bytes, int, float]:
+        # Redis XADD only accepts str/bytes/int/float values
+        if isinstance(v, (str, bytes, int, float)):
+            return v
+        # Everything else is serialized to a JSON string
+        # (ensure_ascii=False preserves Korean text)
+        return json.dumps(v, ensure_ascii=False)
+
+    @classmethod
+    def _sanitize_fields_for_xadd(cls, fields: Dict[str, Any]) -> Dict[str, Union[str, bytes, int, float]]:
+        clean: Dict[str, Union[str, bytes, int, float]] = {}
+        for k, v in fields.items():
+            if not isinstance(k, str):
+                k = str(k)
+            clean[k] = cls._to_scalar(v)
+        return clean
+
+    # Publish from FastAPI
+    async def xadd(
+        self,
+        stream_name: str,
+        fields: Dict[str, Any],
+        *,
+        maxlen: Optional[int] = 10_000,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        id: str = "*",
+    ) -> str:
+        safe_fields = self._sanitize_fields_for_xadd(fields)
+        return await self.redis_client.xadd(
+            stream_name,
+            safe_fields,
+            id=id,
+            maxlen=maxlen,
+            approximate=approximate,
+            nomkstream=nomkstream,
+        )
+
+    # Read on behalf of a consumer group
+    async def xreadgroup(
+        self,
+        group_name: str,
+        consumer_name: str,
+        stream_name: str,
+        count: Optional[int] = None,
+        block: Optional[int] = None,  # ms
+    ) -> List[tuple]:
+        # Create the consumer group if it does not exist yet
+        try:
+            await self.redis_client.xgroup_create(
+                name=stream_name,
+                groupname=group_name,
+                id="0",
+                mkstream=True,
+            )
+        except redis.exceptions.ResponseError as e:
+            if "BUSYGROUP" not in str(e):
+                raise
+
+        streams = {stream_name: ">"}  # new messages only
+        response = await self.redis_client.xreadgroup(
+            group_name,
+            consumer_name,
+            streams,
+            count=count,
+            block=block,
+        )
+        return response
+
+    # Acknowledge a processed message
+    async def xack(self, stream_name: str, group_name: str, message_ids: List[str]) -> int:
+        return await self.redis_client.xack(stream_name, group_name, *message_ids)
+
+    # Acknowledge and delete once done
+    async def xack_and_del(
+        self,
+        stream_name: str,
+        group_name: str,
+        message_ids: Union[str, List[str]],
+    ) -> int:
+        ids = [message_ids] if isinstance(message_ids, str) else list(message_ids)
+        acked_count = await self.redis_client.xack(stream_name, group_name, *ids)
+        if acked_count > 0:
+            await self.redis_client.xdel(stream_name, *ids)
+        return acked_count
+
+    # Create a consumer group
+    async def xgroup_create(self, stream_name: str, group_name: str, id: str = "$") -> bool:
+        try:
+            await self.redis_client.xgroup_create(stream_name, group_name, id, mkstream=True)
+            return True
+        except redis.exceptions.ResponseError as e:
+            if "BUSYGROUP" in str(e):
+                print(f"Consumer group '{group_name}' already exists.")
+                return False
+            raise
+
+    # Support message reprocessing
+    async def xclaim(
+        self,
+        stream_name: str,
+        group_name: str,
+        consumer_name: str,
+        min_idle_time: int,
+        message_ids: List[str],
+    ) -> List[tuple]:
+        # redis-py's xclaim takes name/groupname/consumername keyword arguments
+        return await self.redis_client.xclaim(
+            name=stream_name,
+            groupname=group_name,
+            consumername=consumer_name,
+            min_idle_time=min_idle_time,
+            message_ids=message_ids,
+        )
+
+    # Automatic reclaim of stale pending messages
+    async def xautoclaim(
+        self,
+        name: str,
+        groupname: str,
+        consumername: str,
+        min_idle_time: int,
+        start_id: str = "0-0",
+        count: Optional[int] = None,
+        justid: bool = False,
+    ):
+        res = await self.redis_client.xautoclaim(
+            name=name,
+            groupname=groupname,
+            consumername=consumername,
+            min_idle_time=min_idle_time,
+            start_id=start_id,
+            count=count,
+            justid=justid,
+        )
+        # Normalize the 2-tuple/3-tuple return shapes
+        if isinstance(res, (list, tuple)) and len(res) == 3:
+            next_id, messages, _deleted = res
+            return next_id, messages
+        return res
+
+    # Shutdown
+    async def aclose(self):
+        await self.redis_client.aclose()
+
+redis_client = RedisStreamClient.init()
\ No newline at end of file
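Reviewer note: a hypothetical one-shot consume loop built on the wrapper above, assuming a Redis instance reachable at `settings.REDIS_URL`; the group/consumer names are illustrative only.

```python
# Drain up to 10 entries once, then ack-and-delete each one.
import asyncio
from app.worker.redis_client import redis_client

async def drain_once():
    resp = await redis_client.xreadgroup(
        group_name="demo-group",
        consumer_name="demo-consumer",
        stream_name="image.jobs",
        count=10,
        block=1000,  # ms
    )
    for _stream, entries in resp or []:
        for msg_id, fields in entries:
            print(msg_id, fields)
            await redis_client.xack_and_del("image.jobs", "demo-group", msg_id)

asyncio.run(drain_once())
```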
diff --git a/app/worker/tasks.py b/app/worker/tasks.py
index f3b2eed..63dfdc5 100644
--- a/app/worker/tasks.py
+++ b/app/worker/tasks.py
@@ -1,47 +1,58 @@
 import asyncio
-import json
-from pathlib import Path
 import redis.asyncio as redis
 from datetime import datetime
 
 from app.core.config import settings
 from app.schemas.job import ImageJob, JobResult
+from app.services.openai_service import checker
 from app.services.predictor_service import predictor_service
 from app.services.s3_service import s3_service
 
+"""
+Download the image -> run model inference on the download
+"""
 async def process_image_scan(job: ImageJob, redis_client: redis.Redis):
-    correlation_id = job.correlationId
-    print(f"[task] Start image scan for job_id={correlation_id}")
-
-    temp_image_path = Path(f"/tmp/{correlation_id}.jpg")
-
+    correlationId = job.correlationId
+    print(f"[task] Start image scan for job_id={correlationId}")
     try:
-        s3_service.download_file_from_presigned_url(job.presignedUrl, temp_image_path)
-        pill_name, label, confidence = predictor_service.predict(temp_image_path)
+        stream_file = await asyncio.to_thread(
+            s3_service.download_file_from_presigned_url,
+            job.presignedUrl
+        )
 
-        finished_at = datetime.utcnow().isoformat()
+        stream_file.seek(0)
+
+        pillName, label, confidence = await asyncio.to_thread(
+            predictor_service.predict,
+            stream_file
+        )
+        print(f"[task] Start asking GPT for job_id={correlationId}")
+        # The OpenAI client is blocking, so keep it off the event loop as well
+        description, isSafe = await asyncio.to_thread(
+            checker.ask_chatgpt_about_pregnancy_safety,
+            pillName
+        )
 
+        finishedAt = datetime.utcnow().isoformat()
         result = JobResult(
-            pill_name=pill_name,
-            correlation_id=correlation_id,
-            label=label,
-            confidence=confidence,
-            finished_at=finished_at,
+            correlationId=correlationId,
+            pillName=pillName,
+            isSafe=isSafe,
+            description=description,
+            finishedAt=finishedAt,
         )
 
         await redis_client.xadd(
             settings.STREAM_RESULT,
-            {"json": result.model_dump_json()},
+            {
+                "correlationId": correlationId,
+                "type": "image_results",
+                "payload": result.model_dump_json(),
+            },
             maxlen=10_000,
             approximate=True,
         )
-        print(f"[task] Image scan finished for job_id={correlation_id}")
+        print(f"[task] Image scan successfully finished for job_id={correlationId}")
 
     except Exception as e:
-        print(f"[task] Failed to process job_id={correlation_id}: {e}")
+        print(f"[task] Failed to process job_id={correlationId}: {e}")
+        # Re-raise so the worker's done-callback sees the failure and routes to the DLQ
+        raise
     finally:
-        if temp_image_path.exists():
-            temp_image_path.unlink()
+        print(f"[task] Image scan finished for job_id={correlationId}")
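Reviewer note: the `raise` added above matters because the worker's done-callback routes to the DLQ only when `task.exception()` is non-None; a swallowed exception would get acked as a success. A self-contained toy (no Redis needed) showing the difference:

```python
import asyncio

async def swallowing():
    try:
        raise RuntimeError("boom")
    except Exception as e:
        print("logged:", e)  # swallowed -> task.exception() is None

async def reraising():
    try:
        raise RuntimeError("boom")
    except Exception as e:
        print("logged:", e)
        raise  # propagated -> task.exception() is the RuntimeError

async def main():
    t1 = asyncio.create_task(swallowing())
    t2 = asyncio.create_task(reraising())
    await asyncio.gather(t1, t2, return_exceptions=True)
    print(t1.exception())  # None -> callback would ack a failed job
    print(t2.exception())  # RuntimeError -> callback routes to DLQ

asyncio.run(main())
```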
+""" +def _decode(b): + if isinstance(b, (bytes, bytearray)): + return b.decode() + else: + return b + + +def _sanitize_fields_for_xadd(fields: dict) -> dict: + # 정제 + cleaned = {} + for k, v in fields.items(): + k = _decode(k) + if isinstance(v, (bytes, bytearray)): + try: + v = v.decode() + except Exception: + pass + else: + v = _to_scalr(v) + cleaned[k] = v + return cleaned + + +""" +"image.jobs"를 구독 +""" class JobWorker: - def __init__(self, redis_client: redis.Redis): + def __init__(self, redis_client: redis_client): self.redis_client = redis_client async def run(self): @@ -16,10 +55,11 @@ async def run(self): while True: try: + # Consumer의 메시지 읽기 resp = await self.redis_client.xreadgroup( - groupname=settings.GROUP_NAME, - consumername=settings.CONSUMER_NAME, - streams={settings.STREAM_JOB: ">"}, + group_name=settings.GROUP_NAME, + consumer_name=settings.CONSUMER_NAME, + stream_name=settings.STREAM_JOB, count=10, block=5000, ) @@ -27,19 +67,52 @@ async def run(self): _, entries = resp[0] for msg_id, fields in entries: try: - job = ImageJob.model_validate_json(fields["json"]) - await process_image_scan(job, redis_client) - # 처리 성공 시에만 ack - await redis_client.xack(settings.STREAM_JOB, settings.GROUP_NAME, msg_id) + job_type = fields.get(b"type") or fields.get("type") + correlation_id = fields.get(b"correlationId") or fields.get("correlationId") + payload = fields.get(b"payload") or fields.get("payload") + + # type 검증 + if job_type in (b"image_jobs", "image_jobs"): + + # payload 전처리 + if isinstance(payload, (bytes, bytearray)): + payload_str = payload.decode() + else: + payload_str = payload if isinstance(payload, str) else json.dumps(payload) + + # 최종 반환 data + data = json.loads(payload_str) + print(f"Job received id={msg_id} correlationId={correlation_id} payload={data}") + + job = ImageJob.model_validate(data) + # XADD까지 호출 + task = asyncio.create_task(process_image_scan(job, redis_client)) + print(f"[worker] {task} 발행 성공") + + # 처리 성공 시에만 ack 후 del + task.add_done_callback(lambda t: asyncio.create_task( + self.redis_client.xack_and_del(settings.STREAM_JOB, settings.GROUP_NAME, msg_id) + if not t.exception() else + self.redis_client.xadd(f"{settings.STREAM_JOB}:DLQ", + {"id": msg_id, "error": str(t.exception()), **fields}) + )) + + else: + # job_type 불일치 경우 -> DLQ + clean = _sanitize_fields_for_xadd(fields) + clean.update({"id": _decode(msg_id), "error": "unexpected job type"}) + await self.redis_client.xadd(f"{settings.STREAM_JOB}:DLQ", clean) + except asyncio.CancelledError: # 취소되면 재전송되도록 ack 하지 않음 raise + except Exception as e: - await self.redis_client.xadd( - f"{settings.STREAM_JOB}:DLQ", - {"id": msg_id, "error": str(e), **fields}, - ) + clean = _sanitize_fields_for_xadd(fields) + clean.update({"id": _decode(msg_id), "error": str(e)}) + await self.redis_client.xadd(f"{settings.STREAM_JOB}:DLQ", clean) + # 주기적으로 AutoClaim now = asyncio.get_event_loop().time() if now - last_reclaim > reclaim_every_sec: last_reclaim = now @@ -53,14 +126,34 @@ async def run(self): ) for msg_id, fields in claimed: try: - job = ImageJob.model_validate_json(fields["json"]) - asyncio.create_task(process_image_scan(job, self.redis_client)) - await self.redis_client.xack(settings.STREAM_JOB, settings.GROUP_NAME, msg_id) + payload = fields.get(b"payload") or fields.get("payload") + if isinstance(payload, (bytes, bytearray)): + payload = payload.decode() + job = ImageJob.model_validate_json(payload) + + task = asyncio.create_task(process_image_scan(job, self.redis_client)) + print(f"[worker] {task} 발행 
성공") + + def _on_done(t: asyncio.Task, *, msg_id=msg_id, fields=fields): + async def _ack_or_dlq(): + exc = t.exception() + if exc is None: + await self.redis_client.xack_and_del(settings.STREAM_JOB, settings.GROUP_NAME, + msg_id) + else: + clean = _sanitize_fields_for_xadd(fields) + clean.update({"id": _decode(msg_id), "error": str(exc)}) + await self.redis_client.xadd(f"{settings.STREAM_JOB}:DLQ", clean) + + asyncio.create_task(_ack_or_dlq()) + + task.add_done_callback(_on_done) + except Exception as e: - await self.redis_client.xadd( - f"{settings.STREAM_JOB}:DLQ", - {"id": msg_id, "error": str(e), **fields}, - ) + clean = _sanitize_fields_for_xadd(fields) + clean.update({"id": _decode(msg_id), "error": str(e)}) + await self.redis_client.xadd(f"{settings.STREAM_JOB}:DLQ", clean) + except asyncio.CancelledError: print("[worker] cancelled; bye") break diff --git a/deployment/deploy.sh b/deployment/deploy.sh new file mode 100644 index 0000000..9f012d4 --- /dev/null +++ b/deployment/deploy.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +set -e + +ERR_MSG='' + +trap 'echo "Error occured: $ERR_MSG. Exiting deploy script."; exit 1' ERR + + +# 현재 포트 파악 +if sudo docker ps --filter "name=app-blue" --quiet | grep -E .; then + echo "Blue down, Green Up " + BEFORE_COMPOSE_COLOR="blue" + AFTER_COMPOSE_COLOR="green" + HOST_PORT="8001" +else + echo "Green down, Blue up" + BEFORE_COMPOSE_COLOR="green" + AFTER_COMPOSE_COLOR="blue" + HOST_PORT="8000" +fi + +echo "Pulling new image" +# docker pull +docker compose pull app-${AFTER_COMPOSE_COLOR} +docker compose up -d --no-deps --force-recreate app-${AFTER_COMPOSE_COLOR} + + +# 새 컨테이너가 running 될 때까지 대기 +for i in $(seq 1 600); do + if docker ps --filter "name=^/app-${AFTER_COMPOSE_COLOR}$" --filter "status=running" --format '{{.Names}}' | grep -q .; then + echo "New app-${AFTER_COLOR} container is running." + break + fi + sleep 1 + if [ "$i" -eq 60 ]; then + echo "New container failed to start in time." >&2 + exit 1 + fi +done + +# 이전 컨테이너 종료 및 정리 +if docker ps --filter "name=app-${AFTER_COMPOSE_COLOR}" --filter "status=running" | grep -q .; then + echo "Stopping old container app-${BEFORE_COMPOSE_COLOR}" + docker stop app-${BEFORE_COMPOSE_COLOR} || true + docker rm app-${BEFORE_COMPOSE_COLOR} || true + docker image prune -af +fi + +echo "Deployment success." 
diff --git a/deployment/docker-compose.yml b/deployment/docker-compose.yml
new file mode 100644
index 0000000..b7d1c2b
--- /dev/null
+++ b/deployment/docker-compose.yml
@@ -0,0 +1,30 @@
+services:
+  app-blue:
+    image: ${ECR_URI}/dearbelly-cv:latest
+    container_name: app-blue
+    ports:
+      - "8000:8000"
+    env_file:
+      - /home/ubuntu/dearbelly/deployment/.env
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
+
+  app-green:
+    image: ${ECR_URI}/dearbelly-cv:latest
+    container_name: app-green
+    ports:
+      - "8001:8000"
+    env_file:
+      - /home/ubuntu/dearbelly/deployment/.env
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
\ No newline at end of file
diff --git a/deployment/nginx.conf b/deployment/nginx.conf
new file mode 100644
index 0000000..a4bff5b
--- /dev/null
+++ b/deployment/nginx.conf
@@ -0,0 +1,21 @@
+events {}
+
+http {
+    upstream backend {
+        server app-blue:8000;
+    }
+
+    server {
+        listen 80;
+
+        location / {
+            proxy_pass http://backend/;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
+        }
+    }
+}
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 5cf02b2..d06a886 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,9 +3,9 @@
 uvicorn
 redis
 pydantic
 pydantic-settings
-boto3
 requests
 torch==2.8.0
 torchvision==0.23.0
 Pillow==11.3.0
-dotenv
\ No newline at end of file
+python-dotenv
+openai
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/s3_service_test.py b/tests/s3_service_test.py
new file mode 100644
index 0000000..fe884b2
--- /dev/null
+++ b/tests/s3_service_test.py
@@ -0,0 +1,39 @@
+import io
+import pytest
+from unittest.mock import patch, MagicMock
+
+from app.services.s3_service import s3_service
+
+class TestDownloadFile:
+    @patch("app.services.s3_service.requests.get")
+    def test_download_file_success(self, mock_get):
+        # arrange
+        mock_get.return_value = MagicMock()
+        mock_get.return_value.content = b"test file content"
+        mock_get.return_value.raise_for_status = MagicMock()
+
+        # act
+        obj = s3_service
+        result = obj.download_file_from_presigned_url(
+            "http://fake-url.com"
+        )
+
+        # assert
+        assert isinstance(result, io.BytesIO)
+        assert result.getvalue() == b"test file content"
+        mock_get.assert_called_once_with(
+            "http://fake-url.com"
+        )
+        mock_get.return_value.raise_for_status.assert_called_once()
+
+    @patch("app.services.s3_service.requests.get")
+    def test_download_file_http_error(self, mock_get):
+        # arrange
+        mock_get.return_value = MagicMock()
+        mock_get.return_value.raise_for_status.side_effect = Exception("HTTP Error")
+
+        obj = s3_service
+
+        # act & assert
+        with pytest.raises(Exception, match="HTTP Error"):
+            obj.download_file_from_presigned_url("http://fake-url.com")
\ No newline at end of file
diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 0000000..d0ad861
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,10 @@
+from fastapi.testclient import TestClient
+
+from app.main import app
+
+client = TestClient(app=app)
+
+def test_health_check():
+    response = client.get("/api/v1/health")
+    assert response.status_code == 200
+    assert response.json() == {"status": "ok"}
\ No newline at end of file
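Reviewer note: a possible follow-up test for the `/predict` publisher, using FastAPI's `dependency_overrides` to swap the Redis dependency for an `AsyncMock`. It assumes the router is mounted under `/api/v1` like the health route, and that importing `app.main` does not require a live Redis (the lifespan is not entered when the client is used without a context manager, matching the pattern in test_main.py).

```python
from unittest.mock import AsyncMock

from fastapi.testclient import TestClient

from app.main import app
from app.api.endpoints.predictions import get_redis_client

def test_create_prediction_job_publishes():
    fake_redis = AsyncMock()
    fake_redis.xadd.return_value = "1-0"  # fake stream entry id
    app.dependency_overrides[get_redis_client] = lambda: fake_redis

    client = TestClient(app=app)
    resp = client.post(
        "/api/v1/predict",
        json={"presignedUrl": "http://fake-url.com", "contentType": "image/jpeg"},
    )

    app.dependency_overrides.clear()
    assert resp.status_code == 202
    assert "job_id" in resp.json()
    fake_redis.xadd.assert_awaited_once()
```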