diff --git a/.cursorignore b/.cursorignore new file mode 100644 index 00000000..4ef9dbe6 --- /dev/null +++ b/.cursorignore @@ -0,0 +1,73 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +*.egg-info/ +dist/ +build/ + +# Virtual Environment +venv/ +env/ +ENV/ +.venv + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Environment variables +*.env +.env.* +backend.env + +# Credentials (실제 인증 파일 - 민감 정보) +credentials/*.json +!credentials/*.example.json +crud/fcm/*.json +!crud/fcm/*.example.json + +# Database +*.sqlite3 +*.db + +# Media files (uploaded content) +media/ +static/ + +# Logs +*.log +logs/ + +# Celery +celerybeat-schedule +celerybeat.pid + +# Git +.git/ + +# Docker (수정 허용) + +# Cache +.cache/ +.pytest_cache/ + +# Coverage +htmlcov/ +.coverage +.coverage.* +coverage.xml + +# OS +.DS_Store +Thumbs.db + +# Migrations (optional - 주석 해제하면 migration 파일도 무시) +# */migrations/*.py +# !*/migrations/__init__.py + diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..5192b77a --- /dev/null +++ b/.dockerignore @@ -0,0 +1,75 @@ +# Git +.git/ +.gitignore + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +*.egg-info/ +dist/ +build/ + +# Virtual Environment +venv/ +env/ +ENV/ +.venv/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Environment files (런타임에 주입) +*.env +.env.* +backend.env + +# Credentials - 빌드 시 제외 (런타임에 볼륨 마운트) +credentials/*.json +!credentials/*.example.json +crud/fcm/*.json + +# Database +*.sqlite3 +*.db + +# Media files +media/ + +# Logs +*.log +logs/ + +# Cache +.cache/ +.pytest_cache/ + +# Coverage +htmlcov/ +.coverage +.coverage.* +coverage.xml + +# OS +.DS_Store +Thumbs.db + +# Documentation (이미지 빌드에 불필요) +docs/ +*.md +!README.md + +# Tests (프로덕션 이미지에 불필요한 경우) +# tests/ +# *_test.py +# test_*.py + +# Docker +Dockerfile* +docker-compose*.yml diff --git a/.github/workflows/backend-ci.yml b/.github/workflows/backend-ci.yml deleted file mode 100644 index 365eb0b7..00000000 --- 
a/.github/workflows/backend-ci.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: CI - Pull Request Django Test - -on: - pull_request: - branches: [ develop ] # ✅ develop 브랜치로 향하는 PR에만 동작 - -jobs: - test: - runs-on: ubuntu-latest - - services: - mysql: - image: mysql:8 - env: - MYSQL_ROOT_PASSWORD: root - MYSQL_DATABASE: capstone - MYSQL_USER: sa - MYSQL_PASSWORD: 1234 - ports: - - 3306:3306 - options: >- - --health-cmd="mysqladmin ping --silent" - --health-interval=10s - --health-timeout=5s - --health-retries=5 - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - - name: Wait for DB to be ready - run: | - until mysqladmin ping -h"127.0.0.1" --silent; do - echo "Waiting for MySQL to be ready" - sleep 3 - done - - - name: Django check & makemigrations - env: - DJANGO_SETTINGS_MODULE: backend.settings.dev - run: | - python manage.py check - python manage.py makemigrations --check --dry-run - - - name: Run DB migrations (basic build test) - env: - DJANGO_SETTINGS_MODULE: backend.settings.dev - DB_NAME: capstone - DB_USER: sa - DB_PASSWORD: 1234 - DB_HOST: 127.0.0.1 # ✅ 여기가 핵심 - run: | - python manage.py migrate - diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml deleted file mode 100644 index 75867d01..00000000 --- a/.github/workflows/cd.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: CD - Deploy to Server via SSH - -on: - push: - branches: - - main # ✅ 오직 main 브랜치 push만 트리거 - workflow_dispatch: - -jobs: - deploy: - runs-on: ubuntu-latest - environment: production # ✅ production 환경 secrets와 연결 - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: List files for debug - run: ls -al && ls -al backend - - - name: Upload docker-compose file to server - uses: appleboy/scp-action@v0.1.4 - with: - host: ${{ secrets.SERVER_HOST 
}} - username: ${{ secrets.SERVER_USER }} - key: ${{ secrets.SERVER_SSH_KEY }} - source: . - target: /home/ubuntu/app - - - name: Connect & Deploy via SSH - uses: appleboy/ssh-action@v1.0.3 - with: - host: ${{ secrets.SERVER_HOST }} - username: ${{ secrets.SERVER_USER }} - key: ${{ secrets.SERVER_SSH_KEY }} - script: | - export SECRET_KEY="${{ secrets.SECRET_KEY }}" - export MYSQL_USER="${{ secrets.MYSQL_USER }}" - export MYSQL_PASSWORD="${{ secrets.MYSQL_PASSWORD }}" - export MYSQL_DATABASE="${{ secrets.MYSQL_DATABASE }}" - export MYSQL_ROOT_PASSWORD="${{ secrets.MYSQL_ROOT_PASSWORD }}" - - cd /home/ubuntu/app - docker compose -f docker-compose.app.yml pull - docker compose -f docker-compose.app.yml down - docker compose -f docker-compose.app.yml up -d --build diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 0503cca6..00000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: CI - Build & Push Docker Image - -on: - push: - branches: [ develop ] - -jobs: - build-and-push: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Log in to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push backend image - run: | - docker build -f Dockerfile-prod -t ${{ secrets.DOCKER_USERNAME }}/django-backend:latest . 
- docker push ${{ secrets.DOCKER_USERNAME }}/django-backend:latest diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 00000000..a8d6d083 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,44 @@ +name: Docker Build + +on: + push: + branches: [develop] + pull_request: + branches: [develop] + +concurrency: + group: docker-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + name: ${{ matrix.service.name }} + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + service: + - name: main + dockerfile: docker/Dockerfile.main + - name: ocr + dockerfile: docker/Dockerfile.ocr + - name: alert + dockerfile: docker/Dockerfile.alert + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build ${{ matrix.service.name }} image + uses: docker/build-push-action@v6 + with: + context: . + file: ${{ matrix.service.dockerfile }} + push: false + cache-from: type=gha,scope=${{ matrix.service.name }} + cache-to: type=gha,mode=max,scope=${{ matrix.service.name }} + tags: speedcam/${{ matrix.service.name }}:ci-${{ github.sha }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..75d160ca --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,45 @@ +name: Lint + +on: + push: + branches: [develop] + pull_request: + branches: [develop] + +concurrency: + group: lint-${{ github.ref }} + cancel-in-progress: true + +jobs: + lint: + name: Code Quality + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Cache pip + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-lint-${{ hashFiles('requirements/dev.txt') }} + restore-keys: | + ${{ runner.os }}-pip-lint- + + - name: Install lint dependencies 
+ run: pip install flake8==7.1.1 black==24.10.0 isort==5.13.2 + + - name: Run flake8 + run: flake8 . --count --show-source --statistics + + - name: Run black (check) + run: black --check --diff --exclude='/(\.venv|migrations)/' . + + - name: Run isort (check) + run: isort --check-only --diff --profile black --skip .venv --skip migrations . diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..7a46f974 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,108 @@ +name: Test + +on: + push: + branches: [develop] + pull_request: + branches: [develop] + +concurrency: + group: test-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + name: pytest + runs-on: ubuntu-latest + + services: + mysql: + image: mysql:8.0 + env: + MYSQL_ROOT_PASSWORD: testpass + MYSQL_DATABASE: speedcam + ports: + - 3306:3306 + options: >- + --health-cmd="mysqladmin ping -h 127.0.0.1 --silent" + --health-interval=10s + --health-timeout=5s + --health-retries=5 + --health-start-period=30s + + env: + DJANGO_SETTINGS_MODULE: config.settings.dev + DB_HOST: 127.0.0.1 + DB_PORT: 3306 + DB_USER: root + DB_PASSWORD: testpass + DB_NAME: speedcam + DB_NAME_VEHICLES: speedcam_vehicles + DB_NAME_DETECTIONS: speedcam_detections + DB_NAME_NOTIFICATIONS: speedcam_notifications + CELERY_BROKER_URL: memory:// + SECRET_KEY: ci-test-secret-key + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Cache pip + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-test-${{ hashFiles('requirements/test.txt') }} + restore-keys: | + ${{ runner.os }}-pip-test- + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y default-libmysqlclient-dev pkg-config + + - name: Install Python dependencies + run: pip install -r requirements/test.txt + + - name: Create MSA databases + run: | + 
mysql -h 127.0.0.1 -u root -ptestpass -e " + CREATE DATABASE IF NOT EXISTS speedcam_vehicles CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + CREATE DATABASE IF NOT EXISTS speedcam_detections CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + CREATE DATABASE IF NOT EXISTS speedcam_notifications CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + ALTER DATABASE speedcam CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + " + + - name: Run migrations + run: | + python manage.py migrate --database=default + python manage.py migrate --database=vehicles_db + python manage.py migrate --database=detections_db + python manage.py migrate --database=notifications_db + + - name: Run tests + run: | + pytest \ + --cov=apps \ + --cov-report=term-missing \ + --cov-report=xml:coverage.xml \ + --junitxml=test-results.xml \ + -v --tb=short + + - name: Upload coverage report + if: always() + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: coverage.xml + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-results + path: test-results.xml diff --git a/.gitignore b/.gitignore index 4c13ac09..964eb881 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,81 @@ -.idea +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +*.egg-info/ +dist/ +build/ +eggs/ +*.egg + +# Virtual Environment +venv/ +env/ +ENV/ +.venv/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Environment variables *.env +.env.* +!*.env.example + +# Credentials (실제 인증 파일 - 절대 커밋 금지) +credentials/*.json +!credentials/*.example.json + +# Legacy FCM 폴더 (마이그레이션 후 삭제 예정) +crud/fcm/*.json +!crud/fcm/*.example.json + +# Database +*.sqlite3 +*.db + +# Media files (uploaded content) +media/ +staticfiles/ + +# Logs *.log -*.pot -*.pyc -__pycache__/ -static \ No newline at end of file +logs/ + +# Celery +celerybeat-schedule +celerybeat.pid + +# Cache +.cache/ +.pytest_cache/ + +# Coverage +htmlcov/ +.coverage +.coverage.* +coverage.xml + +# OS 
+.DS_Store +Thumbs.db + +# pytest +.pytest_cache/ + +# mypy +.mypy_cache/ + +# Monitoring volumes +docker/monitoring/grafana/data/ +docker/monitoring/prometheus/data/ +docker/monitoring/loki/data/ + +# Claude Code +.claude/ diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 5d3df32d..00000000 --- a/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM python:3.10 - -# 작업 디렉토리 설정 -WORKDIR /app -# 의존성 설치 -COPY requirements.txt . -RUN apt-get update -RUN pip install --upgrade pip -RUN pip install -r requirements.txt -# 소스 코드 복사 -COPY . . \ No newline at end of file diff --git a/Dockerfile-prod b/Dockerfile-prod deleted file mode 100644 index 54a15ab3..00000000 --- a/Dockerfile-prod +++ /dev/null @@ -1,17 +0,0 @@ -FROM python:3.10 - -# 작업 디렉토리 설정 -WORKDIR /app -# 의존성 설치 -COPY requirements.txt . -RUN apt-get update -RUN pip install --upgrade pip -RUN pip install -r requirements.txt -# 소스 코드 복사 -COPY . . - -# 포트 오픈 -EXPOSE 8000 - -# Dockerfile-prod -CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000", "--workers=4", "--threads=2"] \ No newline at end of file diff --git a/README.md b/README.md index 1650d7c5..1b6ab05d 100644 --- a/README.md +++ b/README.md @@ -1 +1,383 @@ -# backend \ No newline at end of file +# Overspeed vehicle detection and alert system - with Qualcomm + +
+ +
2025.03.20-2025.05.30
+

https://autonotify.store

+

실시간 과속탐지 및 알림 시스템

+

Rubik Pi 보드에서 YOLO 기반 객체 감지와 속도 측정을 통해 과속 차량을 탐지하고,
+서버로 정보를 전송해 실시간 알림까지 제공하는 스마트 교통 시스템

+
+
+ +

🖥️ Demo

+

시연 영상

+https://www.youtube.com/watch?v=FDzbjOeika8 + +

메인페이지(과속 차량 목록보기)

+ +

과속 차량 개별 보기

+ +

알림 확인하기

+ + + + +
+
+
+ + +

🏛️ System Architecture

+image + +[자세히 보기](https://www.notion.so/2f33187fa1c980e1895cfef39b2c8ec7?pvs=21) + +### 기존 시스템 아키텍처의 문제점 + +기존 아키텍처는 다음과 위와 같은 구조(Before)를 가지고 있었습니다 + +이 구조에서 다음과 같은 **4가지 핵심 문제**가 발생했습니다. + +- OCR 동기 처리로 인한 서버 처리량 저하 +- 느린 응답으로 인한 Edge Device 블로킹 +- HTTP 기반 IoT 통신의 구조적 한계 +- OCR 장애가 전체 서비스에 전파 + +### 새로운 아키텍처의 설계 + +위 문제들을 해결하기 위해 **Event Driven Architecture**로 전환했습니다. + +- 비동기 이벤트 처리로 서버 처리량 극대화 +- 즉시 응답으로 Edge Device 해방 +- MQTT 프로토콜로 IoT 최적화 +- 완전한 장애 격리와 독립적 확장 + +### 정리 + +**Before vs After 비교** + +| 문제 영역 | Before | After | +| --- | --- | --- | +| **OCR 처리** | Django 동기 (블로킹) | OCR-Worker 비동기 | +| **응답 시간** | 3초+ | < 100ms | +| **IoT 프로토콜** | HTTP (오버헤드) | MQTT (경량, QoS) | +| **메시지 보장** | 없음 | At least once | +| **장애 격리** | 전체 영향 | 컴포넌트 격리 | +| **확장성** | 서버 전체 확장 | Worker별 독립 확장 | +| **데이터베이스** | 단일 DB | 서비스별 4개 DB | + +**기존 아키텍처의 근본적 한계**였던 **OCR 동기 처리**를 제거하고, **Event Driven Architecture**로 전환함으로써 + +1. **서버 처리량 극대화**: API 서버는 이벤트 발행만 담당, OCR은 별도 Worker가 병렬 처리 +2. **Edge Device 효율화**: 즉시 응답으로 연속 감지 가능, 데이터 유실 방지 +3. **IoT 최적화**: MQTT 프로토콜로 경량화, 메시지 전달 보장, 오프라인 대응 +4. **운영 안정성**: 장애 격리, 독립적 확장, 이벤트 보존으로 시스템 복원력 확보 +
+
+ +

🛠️ Tech Stack

+
+

Frontend

+ + + + + + +
+
+

Backend

+ + + + + + + +
+
+

Infra

+ + + + + + + + +
+
+

etc

+ + + + + + + +
+
+
+ +
+ +

Notification System Design

+ +

Django - RabbitMQ - Celery - FCM(3rd party Service) feat. Ack & Nack

+ +

Dead Letter Queue & Dead Letter Consumer

+ + + + +

reference

+
+- System Design interview (Alex Xu)
+- 분산 시스템에서 데이터를 전달하는 효율적인 방법 - nhn 김병부
+

Rubik Pi 3

+Qualcomm 기반 Rubik Pi 하드웨어에서 YOLO 객체 탐지와 GStreamer를 활용해, +실시간으로 과속 차량을 감지하는 완전한 엣지 기반 시스템. +카메라 입력부터 추론, 트래킹, 속도 측정, 과속 차량 촬영까지 모든 과정을 로컬에서 처리하므로 클라우드 연산 불필요. + +## Rubik Tech Stack + +| Category | Technologies | +|----------------------|-----------------------------------------------------------------| +| **Hardware** | Rubik Pi 3, IMX477 image sensor, 10MP HQ Lens(16mm) | +| **Object Detection** | YOLOv5m | +| **Acceleration** | Qualcomm SNPE + TFLite delegate | +| **Pipeline** | GStreamer | +| **Programming** | Python | +| **Features** | On-device tracking, speed measurement, snapshot, multithreading | + +## Object Tracking (IoU) + + + +IoU를 계산하여, 다음프레임의 객체가 같은 객체인지 판단 + +## Speed Measurement + +### Method 1 (Not Used) + + + +프레임간 중심 좌표의 이동거리 변화로 속도를 측정 + +### Method 2 (✅Selected) + + + +가상의 두 선을 그어놓고, 두 선을 동과하는데 걸리는 시간을 측정 + +하지만, 이 방법은 가상의 두 선 사이의 실제 도로 거리를 알아야 정확히 측정 가능 + +## Multi Threading + +병목 현상을 최소화 하기 위해서 멀티 스레딩을 사용 + ++ 메인 스레드 ++ 트래킹, 속도 측정 스레드 ++ 사진촬영 및 전송 스레드 + +
+
+ +

📁 API

+

Swagger

+ +

Postman

+ + +
+ +

🔍 Monitoring

+

Portainer

+ + + +

RabbitMQ

+ + +

Flower (celery monitoring)

+ + +
+

📓 How to Start

+ +### Clone Repository + +docker repository를 클론합니다. + + + +
+ Frontend + +### Install Packages + +패키지 설치를 합니다. + + ``` + npm install + ``` + +### Add Environment Files + +환경 파일을 생성해 줍니다. + +#### .env + + ``` + VITE_API_BASE_URL=http://localhost:8000/api + VITE_FIREBASE_API_KEY=YOUR_FIREBASE_API_KEY + VITE_FIREBASE_AUTH_DOMAIN=YOUR_FIREBASE_AUTH_DOMAIN + VITE_FIREBASE_PROJECT_ID=YOUR_FIREBASE_PROJECT_ID + VITE_FIREBASE_STORAGE_BUCKET=YOUR_FIREBASE_STORAGE_BUCKET + VITE_FIREBASE_MESSAGING_SENDER_ID=YOUR_SENDER_ID + VITE_FIREBASE_APP_ID=YOUR_FIREBASE_APP_ID + VITE_FIREBASE_VAPID_KEY=YOUR_FIREBASE_VAPID_KEY + ``` + +### Getting Started + +마지막으로 개발 서버를 열어줍니다. + + ``` + npm run dev + ``` + +### See Result + +http://localhost:5173 에 접속하여 결과물을 조회합니다. + +
+ + +
+ Backend + +### Add Environment Files(.env) + +**/.env** + + ``` + DATABASE_NAME= capstone + DATABASE_USER= sa + DATABASE_PASS= 1234 + DATABASE_HOST= + DATABASE_PORT= + SECRET_KEY= + + + ``` + + ``` + + + + ``` + +### Docker Run Command + +백엔드 서비스를 시작하기 위해 다음 Docker Compose 명령어를 실행합니다. + + ```bash + docker-compose -p teaml -f Solomon-Docker/docker-compose.prod.yml up -d -—build + ``` + +
+
+ +

Member

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Pictures + + + + + + + + + + + + + + + +
Name이상훈진민우최명헌서정찬
Position + Leader
+ Backend
+ DevOps
+ Design
+
+ Rubik Pi
+ Tracking
+ Calculate
+ YOLO
+
+ Backend
+
+ Frontend
+
GitHub + + + + + + + + + + + + + + + +
+ + + +
+
diff --git a/apps/__init__.py b/apps/__init__.py new file mode 100644 index 00000000..2fe8692a --- /dev/null +++ b/apps/__init__.py @@ -0,0 +1 @@ +# Django Apps Package diff --git a/apps/detections/__init__.py b/apps/detections/__init__.py new file mode 100644 index 00000000..7d273a34 --- /dev/null +++ b/apps/detections/__init__.py @@ -0,0 +1,2 @@ +# Detections App +default_app_config = "apps.detections.apps.DetectionsConfig" diff --git a/apps/detections/admin.py b/apps/detections/admin.py new file mode 100644 index 00000000..17d56fce --- /dev/null +++ b/apps/detections/admin.py @@ -0,0 +1,20 @@ +from django.contrib import admin + +from .models import Detection + + +@admin.register(Detection) +class DetectionAdmin(admin.ModelAdmin): + list_display = [ + "id", + "ocr_result", + "detected_speed", + "speed_limit", + "location", + "status", + "vehicle_id", + "detected_at", + ] + list_filter = ["status", "camera_id", "detected_at"] + search_fields = ["ocr_result", "location", "camera_id"] + readonly_fields = ["created_at", "updated_at"] diff --git a/apps/detections/apps.py b/apps/detections/apps.py new file mode 100644 index 00000000..6f62389b --- /dev/null +++ b/apps/detections/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class DetectionsConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.detections" + verbose_name = "과속 감지" diff --git a/apps/detections/migrations/0001_initial.py b/apps/detections/migrations/0001_initial.py new file mode 100644 index 00000000..68b67c3a --- /dev/null +++ b/apps/detections/migrations/0001_initial.py @@ -0,0 +1,129 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Detection", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "vehicle_id", + 
models.BigIntegerField( + blank=True, + db_index=True, + null=True, + verbose_name="차량 ID", + ), + ), + ( + "detected_speed", + models.FloatField(verbose_name="감지 속도"), + ), + ( + "speed_limit", + models.FloatField(default=60.0, verbose_name="제한 속도"), + ), + ( + "location", + models.CharField( + blank=True, max_length=255, null=True, verbose_name="위치" + ), + ), + ( + "camera_id", + models.CharField( + blank=True, + max_length=50, + null=True, + verbose_name="카메라 ID", + ), + ), + ( + "image_gcs_uri", + models.CharField( + max_length=500, verbose_name="GCS 이미지 경로" + ), + ), + ( + "ocr_result", + models.CharField( + blank=True, max_length=20, null=True, verbose_name="OCR 결과" + ), + ), + ( + "ocr_confidence", + models.FloatField( + blank=True, null=True, verbose_name="OCR 신뢰도" + ), + ), + ( + "detected_at", + models.DateTimeField(verbose_name="감지 시간"), + ), + ( + "processed_at", + models.DateTimeField( + blank=True, null=True, verbose_name="처리 완료 시간" + ), + ), + ( + "status", + models.CharField( + choices=[ + ("pending", "Pending"), + ("processing", "Processing"), + ("completed", "Completed"), + ("failed", "Failed"), + ], + default="pending", + max_length=20, + verbose_name="상태", + ), + ), + ( + "error_message", + models.TextField( + blank=True, null=True, verbose_name="에러 메시지" + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ], + options={ + "verbose_name": "감지 내역", + "verbose_name_plural": "감지 내역 목록", + "db_table": "detections", + "ordering": ["-detected_at"], + "indexes": [ + models.Index( + fields=["vehicle_id"], name="detections_vehicle_id_idx" + ), + models.Index( + fields=["detected_at"], name="detections_detected_at_idx" + ), + models.Index( + fields=["status", "created_at"], + name="detections_status_created_idx", + ), + models.Index( + fields=["camera_id", "detected_at"], + name="detections_camera_detected_idx", + ), + ], + }, + ), + ] diff --git 
a/apps/detections/migrations/__init__.py b/apps/detections/migrations/__init__.py new file mode 100644 index 00000000..f2e134d0 --- /dev/null +++ b/apps/detections/migrations/__init__.py @@ -0,0 +1,2 @@ +# Detections app migrations + diff --git a/apps/detections/models.py b/apps/detections/models.py new file mode 100644 index 00000000..2ab2d748 --- /dev/null +++ b/apps/detections/models.py @@ -0,0 +1,58 @@ +from django.db import models + + +class Detection(models.Model): + """ + 과속 감지 내역 + + MSA 구조: FK 대신 ID로 다른 서비스 데이터 참조 + - vehicle_id: Vehicles Service의 Vehicle ID + """ + + STATUS_CHOICES = [ + ("pending", "Pending"), + ("processing", "Processing"), + ("completed", "Completed"), + ("failed", "Failed"), + ] + + # MSA: FK 대신 ID로 참조 (Vehicles Service) + vehicle_id = models.BigIntegerField(null=True, blank=True, verbose_name="차량 ID") + detected_speed = models.FloatField(verbose_name="감지 속도") + speed_limit = models.FloatField(default=60.0, verbose_name="제한 속도") + location = models.CharField( + max_length=255, blank=True, null=True, verbose_name="위치" + ) + camera_id = models.CharField( + max_length=50, blank=True, null=True, verbose_name="카메라 ID" + ) + image_gcs_uri = models.CharField(max_length=500, verbose_name="GCS 이미지 경로") + ocr_result = models.CharField( + max_length=20, blank=True, null=True, verbose_name="OCR 결과" + ) + ocr_confidence = models.FloatField(blank=True, null=True, verbose_name="OCR 신뢰도") + detected_at = models.DateTimeField(verbose_name="감지 시간") + processed_at = models.DateTimeField( + blank=True, null=True, verbose_name="처리 완료 시간" + ) + status = models.CharField( + max_length=20, choices=STATUS_CHOICES, default="pending", verbose_name="상태" + ) + error_message = models.TextField(blank=True, null=True, verbose_name="에러 메시지") + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = "detections" + verbose_name = "감지 내역" + verbose_name_plural = "감지 내역 목록" + ordering = 
["-detected_at"] + indexes = [ + models.Index(fields=["vehicle_id"]), + models.Index(fields=["detected_at"]), + models.Index(fields=["status", "created_at"]), + models.Index(fields=["camera_id", "detected_at"]), + ] + + def __str__(self): + return f"{self.ocr_result or 'Unknown'} - {self.detected_speed}km/h" diff --git a/apps/detections/serializers.py b/apps/detections/serializers.py new file mode 100644 index 00000000..addc4bb7 --- /dev/null +++ b/apps/detections/serializers.py @@ -0,0 +1,58 @@ +from rest_framework import serializers + +from .models import Detection + + +class DetectionSerializer(serializers.ModelSerializer): + """Detection 상세 Serializer""" + + class Meta: + model = Detection + fields = [ + "id", + "vehicle_id", + "detected_speed", + "speed_limit", + "location", + "camera_id", + "image_gcs_uri", + "ocr_result", + "ocr_confidence", + "detected_at", + "processed_at", + "status", + "error_message", + "created_at", + "updated_at", + ] + read_only_fields = ["id", "created_at", "updated_at"] + + +class DetectionListSerializer(serializers.ModelSerializer): + """목록 조회용 간략 Serializer""" + + class Meta: + model = Detection + fields = [ + "id", + "vehicle_id", + "detected_speed", + "speed_limit", + "location", + "camera_id", + "ocr_result", + "status", + "detected_at", + "processed_at", + ] + + +class DetectionStatisticsSerializer(serializers.Serializer): + """통계 데이터 Serializer""" + + total_detections = serializers.IntegerField() + completed_count = serializers.IntegerField() + failed_count = serializers.IntegerField() + pending_count = serializers.IntegerField() + avg_speed = serializers.FloatField() + max_speed = serializers.FloatField() diff --git a/apps/detections/urls.py b/apps/detections/urls.py new file mode 100644 index 00000000..20bac70e --- /dev/null +++ b/apps/detections/urls.py @@ -0,0 +1,11 @@ +from django.urls import include, path +from rest_framework.routers import DefaultRouter + +from .views import DetectionViewSet + +router = 
DefaultRouter() +router.register(r"detections", DetectionViewSet, basename="detection") + +urlpatterns = [ + path("", include(router.urls)), +] diff --git a/apps/detections/views.py b/apps/detections/views.py new file mode 100644 index 00000000..3a3a469e --- /dev/null +++ b/apps/detections/views.py @@ -0,0 +1,80 @@ +from django.db.models import Avg, Count, Max +from django_filters.rest_framework import DjangoFilterBackend +from rest_framework import filters, viewsets +from rest_framework.decorators import action +from rest_framework.response import Response + +from .models import Detection +from .serializers import ( + DetectionListSerializer, + DetectionSerializer, + DetectionStatisticsSerializer, +) + + +class DetectionViewSet(viewsets.ReadOnlyModelViewSet): + """과속 감지 내역 API (MSA: detections_db 사용)""" + + queryset = Detection.objects.using("detections_db").all() + serializer_class = DetectionSerializer + filter_backends = [DjangoFilterBackend, filters.OrderingFilter] + filterset_fields = ["status", "camera_id", "location"] + ordering_fields = ["detected_at", "created_at", "detected_speed"] + ordering = ["-detected_at"] + + def get_serializer_class(self): + if self.action == "list": + return DetectionListSerializer + return DetectionSerializer + + @action(detail=False, methods=["get"]) + def pending(self, request): + """판독 중인 차량 목록""" + pending_detections = self.queryset.filter(status__in=["pending", "processing"]) + serializer = DetectionListSerializer(pending_detections, many=True) + return Response(serializer.data) + + @action(detail=False, methods=["get"]) + def statistics(self, request): + """위반 통계""" + from datetime import timedelta + + from django.utils import timezone + + queryset = Detection.objects.using("detections_db").all() + + # 기간 필터 (선택) + period = request.query_params.get("period") + period_map = { + "today": timedelta(days=1), + "week": timedelta(weeks=1), + "month": timedelta(days=30), + } + if period in period_map: + queryset = 
queryset.filter( + detected_at__gte=timezone.now() - period_map[period] + ) + + # 카메라 필터 (선택) + camera_id = request.query_params.get("camera_id") + if camera_id: + queryset = queryset.filter(camera_id=camera_id) + + stats = queryset.aggregate( + total_detections=Count("id"), + avg_speed=Avg("detected_speed"), + max_speed=Max("detected_speed"), + ) + + stats["completed_count"] = queryset.filter(status="completed").count() + stats["failed_count"] = queryset.filter(status="failed").count() + stats["pending_count"] = queryset.filter( + status__in=["pending", "processing"] + ).count() + + # None 값 처리 + stats["avg_speed"] = stats["avg_speed"] or 0 + stats["max_speed"] = stats["max_speed"] or 0 + + serializer = DetectionStatisticsSerializer(stats) + return Response(serializer.data) diff --git a/apps/notifications/__init__.py b/apps/notifications/__init__.py new file mode 100644 index 00000000..5a4577be --- /dev/null +++ b/apps/notifications/__init__.py @@ -0,0 +1,2 @@ +# Notifications App +default_app_config = "apps.notifications.apps.NotificationsConfig" diff --git a/apps/notifications/admin.py b/apps/notifications/admin.py new file mode 100644 index 00000000..910a2e93 --- /dev/null +++ b/apps/notifications/admin.py @@ -0,0 +1,11 @@ +from django.contrib import admin + +from .models import Notification + + +@admin.register(Notification) +class NotificationAdmin(admin.ModelAdmin): + list_display = ["id", "detection_id", "title", "status", "retry_count", "sent_at"] + list_filter = ["status", "sent_at"] + search_fields = ["title", "body", "fcm_token"] + readonly_fields = ["created_at"] diff --git a/apps/notifications/apps.py b/apps/notifications/apps.py new file mode 100644 index 00000000..02b52625 --- /dev/null +++ b/apps/notifications/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class NotificationsConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.notifications" + verbose_name = "알림" diff --git 
a/apps/notifications/migrations/0001_initial.py b/apps/notifications/migrations/0001_initial.py new file mode 100644 index 00000000..12998521 --- /dev/null +++ b/apps/notifications/migrations/0001_initial.py @@ -0,0 +1,98 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Notification", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "detection_id", + models.BigIntegerField( + db_index=True, verbose_name="감지 내역 ID" + ), + ), + ( + "fcm_token", + models.CharField( + blank=True, max_length=255, null=True, verbose_name="FCM 토큰" + ), + ), + ( + "title", + models.CharField( + blank=True, max_length=255, null=True, verbose_name="알림 제목" + ), + ), + ( + "body", + models.TextField( + blank=True, null=True, verbose_name="알림 내용" + ), + ), + ( + "sent_at", + models.DateTimeField( + blank=True, null=True, verbose_name="전송 시간" + ), + ), + ( + "status", + models.CharField( + choices=[ + ("pending", "Pending"), + ("sent", "Sent"), + ("failed", "Failed"), + ], + default="pending", + max_length=20, + verbose_name="상태", + ), + ), + ( + "retry_count", + models.IntegerField(default=0, verbose_name="재시도 횟수"), + ), + ( + "error_message", + models.TextField( + blank=True, null=True, verbose_name="에러 메시지" + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ], + options={ + "verbose_name": "알림", + "verbose_name_plural": "알림 목록", + "db_table": "notifications", + "ordering": ["-created_at"], + "indexes": [ + models.Index( + fields=["detection_id"], + name="notifications_detection_id_idx", + ), + models.Index( + fields=["status", "retry_count"], + name="notifications_status_retry_idx", + ), + models.Index( + fields=["sent_at"], name="notifications_sent_at_idx" + ), + ], + }, + ), + ] diff --git a/apps/notifications/migrations/__init__.py 
b/apps/notifications/migrations/__init__.py new file mode 100644 index 00000000..e1080203 --- /dev/null +++ b/apps/notifications/migrations/__init__.py @@ -0,0 +1,2 @@ +# Notifications app migrations + diff --git a/apps/notifications/models.py b/apps/notifications/models.py new file mode 100644 index 00000000..49efd976 --- /dev/null +++ b/apps/notifications/models.py @@ -0,0 +1,47 @@ +from django.db import models + + +class Notification(models.Model): + """ + 알림 전송 이력 + + MSA 구조: FK 대신 ID로 다른 서비스 데이터 참조 + - detection_id: Detections Service의 Detection ID + """ + + STATUS_CHOICES = [ + ("pending", "Pending"), + ("sent", "Sent"), + ("failed", "Failed"), + ] + + # MSA: FK 대신 ID로 참조 (Detections Service) + detection_id = models.BigIntegerField(verbose_name="감지 내역 ID") + fcm_token = models.CharField( + max_length=255, blank=True, null=True, verbose_name="FCM 토큰" + ) + title = models.CharField( + max_length=255, blank=True, null=True, verbose_name="알림 제목" + ) + body = models.TextField(blank=True, null=True, verbose_name="알림 내용") + sent_at = models.DateTimeField(blank=True, null=True, verbose_name="전송 시간") + status = models.CharField( + max_length=20, choices=STATUS_CHOICES, default="pending", verbose_name="상태" + ) + retry_count = models.IntegerField(default=0, verbose_name="재시도 횟수") + error_message = models.TextField(blank=True, null=True, verbose_name="에러 메시지") + created_at = models.DateTimeField(auto_now_add=True) + + class Meta: + db_table = "notifications" + verbose_name = "알림" + verbose_name_plural = "알림 목록" + ordering = ["-created_at"] + indexes = [ + models.Index(fields=["detection_id"]), + models.Index(fields=["status", "retry_count"]), + models.Index(fields=["sent_at"]), + ] + + def __str__(self): + return f"Notification for Detection #{self.detection_id} - {self.status}" diff --git a/apps/notifications/serializers.py b/apps/notifications/serializers.py new file mode 100644 index 00000000..348956a4 --- /dev/null +++ b/apps/notifications/serializers.py @@ -0,0 
+1,30 @@ +from rest_framework import serializers + +from .models import Notification + + +class NotificationSerializer(serializers.ModelSerializer): + """Notification 상세 Serializer""" + + class Meta: + model = Notification + fields = [ + "id", + "detection_id", + "title", + "body", + "sent_at", + "status", + "retry_count", + "error_message", + "created_at", + ] + read_only_fields = ["id", "created_at"] + + +class NotificationListSerializer(serializers.ModelSerializer): + """목록 조회용 간략 Serializer""" + + class Meta: + model = Notification + fields = ["id", "detection_id", "title", "status", "sent_at", "retry_count"] diff --git a/apps/notifications/urls.py b/apps/notifications/urls.py new file mode 100644 index 00000000..699a2f2b --- /dev/null +++ b/apps/notifications/urls.py @@ -0,0 +1,11 @@ +from django.urls import include, path +from rest_framework.routers import DefaultRouter + +from .views import NotificationViewSet + +router = DefaultRouter() +router.register(r"notifications", NotificationViewSet, basename="notification") + +urlpatterns = [ + path("", include(router.urls)), +] diff --git a/apps/notifications/views.py b/apps/notifications/views.py new file mode 100644 index 00000000..4abc1b41 --- /dev/null +++ b/apps/notifications/views.py @@ -0,0 +1,21 @@ +from django_filters.rest_framework import DjangoFilterBackend +from rest_framework import filters, viewsets + +from .models import Notification +from .serializers import NotificationListSerializer, NotificationSerializer + + +class NotificationViewSet(viewsets.ReadOnlyModelViewSet): + """알림 이력 API (MSA: notifications_db 사용)""" + + queryset = Notification.objects.using("notifications_db").all() + serializer_class = NotificationSerializer + filter_backends = [DjangoFilterBackend, filters.OrderingFilter] + filterset_fields = ["status", "detection_id"] + ordering_fields = ["sent_at", "created_at"] + ordering = ["-created_at"] + + def get_serializer_class(self): + if self.action == "list": + return 
NotificationListSerializer + return NotificationSerializer diff --git a/apps/vehicles/__init__.py b/apps/vehicles/__init__.py new file mode 100644 index 00000000..206a7301 --- /dev/null +++ b/apps/vehicles/__init__.py @@ -0,0 +1,2 @@ +# Vehicles App +default_app_config = "apps.vehicles.apps.VehiclesConfig" diff --git a/apps/vehicles/admin.py b/apps/vehicles/admin.py new file mode 100644 index 00000000..e9c5a9df --- /dev/null +++ b/apps/vehicles/admin.py @@ -0,0 +1,18 @@ +from django.contrib import admin + +from .models import Vehicle + + +@admin.register(Vehicle) +class VehicleAdmin(admin.ModelAdmin): + list_display = ["id", "plate_number", "owner_name", "fcm_token_short", "created_at"] + list_filter = ["created_at"] + search_fields = ["plate_number", "owner_name", "owner_phone"] + readonly_fields = ["created_at", "updated_at"] + + def fcm_token_short(self, obj): + if obj.fcm_token: + return f"{obj.fcm_token[:30]}..." + return "-" + + fcm_token_short.short_description = "FCM Token" diff --git a/apps/vehicles/apps.py b/apps/vehicles/apps.py new file mode 100644 index 00000000..dd6b6dbd --- /dev/null +++ b/apps/vehicles/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class VehiclesConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.vehicles" + verbose_name = "차량 관리" diff --git a/apps/vehicles/migrations/0001_initial.py b/apps/vehicles/migrations/0001_initial.py new file mode 100644 index 00000000..bf86f7a4 --- /dev/null +++ b/apps/vehicles/migrations/0001_initial.py @@ -0,0 +1,64 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Vehicle", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "plate_number", + models.CharField( + max_length=20, unique=True, verbose_name="차량 번호" + ), + ), + ( + 
"owner_name", + models.CharField( + blank=True, max_length=100, null=True, verbose_name="소유자명" + ), + ), + ( + "owner_phone", + models.CharField( + blank=True, max_length=20, null=True, verbose_name="연락처" + ), + ), + ( + "fcm_token", + models.CharField( + blank=True, max_length=255, null=True, verbose_name="FCM 토큰" + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ], + options={ + "verbose_name": "차량", + "verbose_name_plural": "차량 목록", + "db_table": "vehicles", + "indexes": [ + models.Index( + fields=["plate_number"], name="vehicles_plate_number_idx" + ), + models.Index( + fields=["fcm_token"], name="vehicles_fcm_token_idx" + ), + ], + }, + ), + ] diff --git a/apps/vehicles/migrations/__init__.py b/apps/vehicles/migrations/__init__.py new file mode 100644 index 00000000..6da95f0c --- /dev/null +++ b/apps/vehicles/migrations/__init__.py @@ -0,0 +1,2 @@ +# Vehicles app migrations + diff --git a/apps/vehicles/models.py b/apps/vehicles/models.py new file mode 100644 index 00000000..6c660dae --- /dev/null +++ b/apps/vehicles/models.py @@ -0,0 +1,35 @@ +from django.db import models + + +class Vehicle(models.Model): + """ + 차량 정보 (FCM 토큰 포함) + + MSA 구조: vehicles_db에 저장 + """ + + plate_number = models.CharField( + max_length=20, unique=True, verbose_name="차량 번호" + ) + owner_name = models.CharField( + max_length=100, blank=True, null=True, verbose_name="소유자명" + ) + owner_phone = models.CharField( + max_length=20, blank=True, null=True, verbose_name="연락처" + ) + fcm_token = models.CharField( + max_length=255, blank=True, null=True, verbose_name="FCM 토큰" + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = "vehicles" + verbose_name = "차량" + verbose_name_plural = "차량 목록" + indexes = [ + models.Index(fields=["fcm_token"]), + ] + + def __str__(self): + return self.plate_number diff --git a/apps/vehicles/serializers.py 
b/apps/vehicles/serializers.py new file mode 100644 index 00000000..5ef9473e --- /dev/null +++ b/apps/vehicles/serializers.py @@ -0,0 +1,28 @@ +from rest_framework import serializers + +from .models import Vehicle + + +class VehicleSerializer(serializers.ModelSerializer): + class Meta: + model = Vehicle + fields = [ + "id", + "plate_number", + "owner_name", + "owner_phone", + "fcm_token", + "created_at", + "updated_at", + ] + read_only_fields = ["id", "created_at", "updated_at"] + + +class VehicleCreateSerializer(serializers.ModelSerializer): + class Meta: + model = Vehicle + fields = ["plate_number", "owner_name", "owner_phone", "fcm_token"] + + +class FCMTokenUpdateSerializer(serializers.Serializer): + fcm_token = serializers.CharField(max_length=255) diff --git a/apps/vehicles/urls.py b/apps/vehicles/urls.py new file mode 100644 index 00000000..586ce7a7 --- /dev/null +++ b/apps/vehicles/urls.py @@ -0,0 +1,11 @@ +from django.urls import include, path +from rest_framework.routers import DefaultRouter + +from .views import VehicleViewSet + +router = DefaultRouter() +router.register(r"vehicles", VehicleViewSet, basename="vehicle") + +urlpatterns = [ + path("", include(router.urls)), +] diff --git a/apps/vehicles/views.py b/apps/vehicles/views.py new file mode 100644 index 00000000..f36c8492 --- /dev/null +++ b/apps/vehicles/views.py @@ -0,0 +1,86 @@ +import logging +import os + +from rest_framework import status, viewsets +from rest_framework.decorators import action +from rest_framework.response import Response + +from .models import Vehicle +from .serializers import ( + FCMTokenUpdateSerializer, + VehicleCreateSerializer, + VehicleSerializer, +) + +logger = logging.getLogger(__name__) + + +class VehicleViewSet(viewsets.ModelViewSet): + """차량 정보 관리 API (MSA: vehicles_db 사용)""" + + queryset = Vehicle.objects.using("vehicles_db").all() + serializer_class = VehicleSerializer + + def get_serializer_class(self): + if self.action == "create": + return 
VehicleCreateSerializer + return VehicleSerializer + + def perform_create(self, serializer): + """생성 시 vehicles_db에 저장 (Router가 자동 라우팅)""" + serializer.save() + + def perform_update(self, serializer): + """업데이트 시 vehicles_db 사용 (Router가 자동 라우팅)""" + serializer.save() + + @action(detail=True, methods=["patch"], url_path="fcm-token") + def update_fcm_token(self, request, pk=None): + """FCM 토큰 업데이트""" + vehicle = self.get_object() + serializer = FCMTokenUpdateSerializer(data=request.data) + + if serializer.is_valid(): + vehicle.fcm_token = serializer.validated_data["fcm_token"] + vehicle.save(using="vehicles_db", update_fields=["fcm_token", "updated_at"]) + return Response(VehicleSerializer(vehicle).data) + + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + @action(detail=False, methods=["post"], url_path="register-fcm") + def register_fcm(self, request): + """번호판 기반 FCM 토큰 등록""" + plate_number = request.data.get("plate_number") + fcm_token = request.data.get("fcm_token") + + if not plate_number or not fcm_token: + return Response( + {"error": "plate_number and fcm_token are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + vehicle, created = Vehicle.objects.using("vehicles_db").update_or_create( + plate_number=plate_number, defaults={"fcm_token": fcm_token} + ) + + # Dashboard 토큰은 FCM 토픽에 구독 + if plate_number == "DASHBOARD": + try: + FCM_MOCK = os.getenv("FCM_MOCK", "false").lower() == "true" + if FCM_MOCK: + logger.info( + "[MOCK] Would subscribe token to dashboard_alerts topic" + ) + else: + from core.firebase.fcm import get_fcm_client + + fcm_client = get_fcm_client() + fcm_client.subscribe_to_topic([fcm_token], "dashboard_alerts") + logger.info("Dashboard token subscribed to dashboard_alerts topic") + except Exception as e: + logger.warning(f"Failed to subscribe dashboard token to topic: {e}") + + return Response( + VehicleSerializer(vehicle).data, + status=status.HTTP_201_CREATED if created else status.HTTP_200_OK, + ) diff 
--git a/backend.env.example b/backend.env.example new file mode 100644 index 00000000..bc51cb14 --- /dev/null +++ b/backend.env.example @@ -0,0 +1,93 @@ +# =========================================== +# SpeedCam Backend Environment Variables +# =========================================== +# 사용법: 이 파일을 backend.env로 복사하여 사용 +# cp backend.env.example backend.env + +# =========================================== +# Django 설정 +# =========================================== +SECRET_KEY=your-django-secret-key-here +DJANGO_SETTINGS_MODULE=config.settings.dev +DEBUG=True + +# =========================================== +# 데이터베이스 설정 (MySQL - MSA) +# =========================================== +# 공통 연결 정보 +DB_HOST=mysql +DB_PORT=3306 +DB_USER=sa +DB_PASSWORD=1234 + +# MSA - 서비스별 독립 데이터베이스 +DB_NAME=speedcam +DB_NAME_VEHICLES=speedcam_vehicles +DB_NAME_DETECTIONS=speedcam_detections +DB_NAME_NOTIFICATIONS=speedcam_notifications + +# =========================================== +# RabbitMQ / Celery 설정 +# =========================================== +CELERY_BROKER_URL=amqp://sa:1234@rabbitmq:5672// +RABBITMQ_HOST=rabbitmq + +# MQTT 설정 (RabbitMQ MQTT Plugin) +MQTT_PORT=1883 +MQTT_USER=sa +MQTT_PASS=1234 + +# =========================================== +# GCS (Google Cloud Storage) 설정 +# =========================================== +# OCR_MOCK=false일 때만 필요 +GOOGLE_APPLICATION_CREDENTIALS=/-path(secret)-/gcp-cloud-storage.json +# GCS_BUCKET_NAME은 불필요 (GCS URI에서 자동 파싱) + +# =========================================== +# Firebase 설정 (FCM Push Notification) +# =========================================== +FIREBASE_CREDENTIALS=/-path(secret)-/firebase-service-account.json + +# =========================================== +# Celery Worker 설정 +# =========================================== +# OCR Worker (CPU 집약적 - prefork pool) +OCR_CONCURRENCY=2 + +# Alert Worker (I/O 집약적 - gevent pool) +ALERT_CONCURRENCY=50 + +# =========================================== +# Mock 설정 (로컬 개발용) +# 
=========================================== +# true: 실제 GCS/FCM 호출 없이 Mock 응답 반환 +# false: 실제 서비스 호출 (credentials 필요) +OCR_MOCK=true +FCM_MOCK=true + +# =========================================== +# Gunicorn 설정 +# =========================================== +GUNICORN_WORKERS=4 +GUNICORN_THREADS=2 + +# =========================================== +# 로깅 설정 +# =========================================== +LOG_LEVEL=info + +# =========================================== +# CORS 설정 +# =========================================== +CORS_ALLOWED_ORIGINS=http://localhost:5173,http://localhost:3000 + +# =========================================== +# OpenTelemetry 설정 +# =========================================== +OTEL_EXPORTER_OTLP_ENDPOINT=http://otel-collector:4317 +OTEL_EXPORTER_OTLP_PROTOCOL=grpc +OTEL_RESOURCE_ATTRIBUTES=service.namespace=speedcam,deployment.environment=dev +# Valid values: always_on, always_off, traceidratio, parentbased_always_on, parentbased_always_off, parentbased_traceidratio +OTEL_TRACES_SAMPLER=parentbased_always_on +OTEL_PYTHON_LOG_CORRELATION=true diff --git a/backend/settings.py b/backend/settings.py deleted file mode 100644 index 3b9fb387..00000000 --- a/backend/settings.py +++ /dev/null @@ -1,127 +0,0 @@ -import os -from dotenv import load_dotenv -from pathlib import Path -import pymysql - -# PyMySQL을 MySQLdb로 인식하게 설정 (mysqlclient 없이 사용 가능) -pymysql.install_as_MySQLdb() - -# BASE_DIR 설정 -BASE_DIR = Path(__file__).resolve().parent.parent - -# .env 파일 로드 (backend.env만 사용) -env_path = os.path.join(BASE_DIR, "backend.env") -if os.path.exists(env_path): - load_dotenv(env_path) - -# SECRET_KEY 로드 -SECRET_KEY = os.getenv("SECRET_KEY") - -# DEBUG 설정 -# DEBUG = os.getenv("DEBUG", "True").lower() == "true" -DEBUG = True - -# ALLOWED_HOSTS 설정 -# ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "localhost,127.0.0.1").split(",") -ALLOWED_HOSTS = ["*"] -# Application definition -INSTALLED_APPS = [ - "django.contrib.admin", - "django.contrib.auth", - 
"django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "post", - "rest_framework", - "drf_yasg", # Swagger 추가 -] - -MIDDLEWARE = [ - "django.middleware.security.SecurityMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", -] - -ROOT_URLCONF = "backend.urls" - -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ], - }, - }, -] - -WSGI_APPLICATION = "backend.wsgi.application" - -# ✅ Database 설정 (환경 변수 기반) -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'NAME': 'capstone', # docker-compose에서 설정한 MYSQL_DATABASE 값 - 'USER': 'sa', # docker-compose에서 설정한 MYSQL_USER 값 - 'PASSWORD': '1234', # docker-compose에서 설정한 MYSQL_PASSWORD 값 - 'HOST': 'mysqldb', # docker-compose의 서비스 이름 (컨테이너 내부에서 접속할 때 사용) - 'PORT': 3306, # 기본 MySQL 포트 - 'OPTIONS': { - 'charset': 'utf8mb4', # UTF-8 문자 인코딩 설정 (이모지 지원 포함) - }, - } -} - -# Password validation -AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", - }, -] - -# Internationalization -LANGUAGE_CODE = "ko-kr" -TIME_ZONE = "Asia/Seoul" -USE_I18N = 
True -USE_TZ = True - -# Static files -STATIC_URL = "/static/" -STATIC_ROOT = os.path.join(BASE_DIR, "static") - -# Default primary key field type -DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" - -# Swagger 설정 -SWAGGER_SETTINGS = { - "SECURITY_DEFINITIONS": { - "Bearer": { - "type": "apiKey", - "name": "Authorization", - "in": "header", - } - }, - "USE_SESSION_AUTH": False, -} - - -CORS_ORIGIN_ALLOW_ALL = True \ No newline at end of file diff --git a/backend/settings/base.py b/backend/settings/base.py deleted file mode 100644 index 69ee5b03..00000000 --- a/backend/settings/base.py +++ /dev/null @@ -1,73 +0,0 @@ -# backend/settings/base.py -import os -from pathlib import Path -import pymysql - -pymysql.install_as_MySQLdb() - -BASE_DIR = Path(__file__).resolve().parent.parent.parent - -SECRET_KEY = os.getenv("SECRET_KEY", "insecure-key") - -INSTALLED_APPS = [ - "django.contrib.admin", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "post", - "rest_framework", - "drf_yasg", -] - -MIDDLEWARE = [ - "django.middleware.security.SecurityMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", -] - -ROOT_URLCONF = "backend.urls" - -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ], - }, - }, -] - -WSGI_APPLICATION = "backend.wsgi.application" - -LANGUAGE_CODE = "ko-kr" -TIME_ZONE 
= "Asia/Seoul" -USE_I18N = True -USE_TZ = True - -STATIC_URL = "/static/" -STATIC_ROOT = os.path.join(BASE_DIR, "static") - -DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" - -SWAGGER_SETTINGS = { - "SECURITY_DEFINITIONS": { - "Bearer": { - "type": "apiKey", - "name": "Authorization", - "in": "header", - } - }, - "USE_SESSION_AUTH": False, -} diff --git a/backend/settings/dev.py b/backend/settings/dev.py deleted file mode 100644 index abeabfd7..00000000 --- a/backend/settings/dev.py +++ /dev/null @@ -1,26 +0,0 @@ -# backend/settings/dev.py -from .base import * -from dotenv import load_dotenv - -env_path = os.path.join(BASE_DIR, "backend.env") -if os.path.exists(env_path): - load_dotenv(env_path) - -DEBUG = True -ALLOWED_HOSTS = ["*"] - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'NAME': os.getenv('DB_NAME', 'capstone'), - 'USER': os.getenv('DB_USER', 'sa'), - 'PASSWORD': os.getenv('DB_PASSWORD', '1234'), - 'HOST': os.getenv('DB_HOST', 'mysqldb'), # ✅ 기본값은 'mysqldb'로 설정 - 'PORT': int(os.getenv('DB_PORT', 3306)), - 'OPTIONS': { - 'charset': 'utf8mb4', - }, - } -} - -CORS_ORIGIN_ALLOW_ALL = True diff --git a/backend/settings/prod.py b/backend/settings/prod.py deleted file mode 100644 index b42385d2..00000000 --- a/backend/settings/prod.py +++ /dev/null @@ -1,21 +0,0 @@ -# backend/settings/prod.py -from .base import * - -DEBUG = False -ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "").split(",") - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'NAME': os.getenv("MYSQL_DATABASE"), - 'USER': os.getenv("MYSQL_USER"), - 'PASSWORD': os.getenv("MYSQL_PASSWORD"), - 'HOST': 'mysqldb', - 'PORT': int(os.getenv("DB_PORT", 3306)), - 'OPTIONS': { - 'charset': 'utf8mb4', - }, - } -} - -CORS_ORIGIN_ALLOW_ALL = False diff --git a/backend/urls.py b/backend/urls.py deleted file mode 100644 index b9b2f8e8..00000000 --- a/backend/urls.py +++ /dev/null @@ -1,50 +0,0 @@ -""" -URL configuration for backend project. 
- -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/5.1/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -from django.contrib import admin -from django.urls import path, include, re_path -from rest_framework.permissions import AllowAny -from drf_yasg.views import get_schema_view -from drf_yasg import openapi -from django.http import JsonResponse - -# 간단한 홈 페이지 뷰 추가 -def home(request): - return JsonResponse({"message": "Welcome to the API"}) - - -schema_view = get_schema_view( - openapi.Info( - title="Title", - default_version='v1', - description="Test description", - terms_of_service="", - contact=openapi.Contact(email="contact@snippets.local"), - license=openapi.License(name="BSD License"), - ), - public=True, - permission_classes=[AllowAny], -) - - -urlpatterns = [ - path("", home, name="home"), # ✅ 루트 경로 추가 - path('admin/', admin.site.urls), - path('api/v1/',include('post.urls')), - re_path(r'^swagger(?P\\.json|\\.yaml)$', schema_view.without_ui(cache_timeout=0), name='schema-json'), - re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'), - re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'), -] diff --git a/config/__init__.py b/config/__init__.py new file mode 100644 index 00000000..53f4ccb1 --- /dev/null +++ b/config/__init__.py @@ -0,0 +1,3 @@ +from .celery import app as celery_app + +__all__ = ("celery_app",) diff --git a/backend/asgi.py b/config/asgi.py similarity index 82% rename from backend/asgi.py 
rename to config/asgi.py index 20e4b447..95d8f0bc 100644 --- a/backend/asgi.py +++ b/config/asgi.py @@ -11,6 +11,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings") +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings") application = get_asgi_application() diff --git a/config/celery.py b/config/celery.py new file mode 100644 index 00000000..9f3b6724 --- /dev/null +++ b/config/celery.py @@ -0,0 +1,71 @@ +"""Celery Configuration""" + +import os + +from celery import Celery +from kombu import Exchange, Queue + +# Django settings 모듈 설정 +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.dev") + +app = Celery("speedcam") + +# Django settings에서 CELERY_ prefix 설정을 자동으로 읽어옴 +app.config_from_object("django.conf:settings", namespace="CELERY") + +# Exchange 정의 +ocr_exchange = Exchange("ocr_exchange", type="direct", durable=True) +fcm_exchange = Exchange("fcm_exchange", type="direct", durable=True) +dlq_exchange = Exchange("dlq_exchange", type="fanout", durable=True) + +# Queue 정의 +app.conf.task_queues = ( + # 새로운 Queue (PRD 구조) + Queue( + "ocr_queue", + exchange=ocr_exchange, + routing_key="ocr", + queue_arguments={ + "x-dead-letter-exchange": "dlq_exchange", + "x-message-ttl": 3600000, + "x-max-priority": 10, + }, + ), + Queue( + "fcm_queue", + exchange=fcm_exchange, + routing_key="fcm", + queue_arguments={ + "x-dead-letter-exchange": "dlq_exchange", + "x-message-ttl": 3600000, + }, + ), + Queue( + "dlq_queue", + exchange=dlq_exchange, + routing_key="", + ), +) + +# Task 라우팅 +app.conf.task_routes = { + # 새로운 Tasks (PRD 구조) + "tasks.ocr_tasks.process_ocr": { + "queue": "ocr_queue", + "exchange": "ocr_exchange", + "routing_key": "ocr", + }, + "tasks.notification_tasks.send_notification": { + "queue": "fcm_queue", + "exchange": "fcm_exchange", + "routing_key": "fcm", + }, + "tasks.dlq_tasks.process_dlq_message": { + "queue": "dlq_queue", + "exchange": "dlq_exchange", + 
"routing_key": "", + }, +} + +# Task 자동 발견 +app.autodiscover_tasks(["tasks"]) diff --git a/config/db_router.py b/config/db_router.py new file mode 100644 index 00000000..551afa69 --- /dev/null +++ b/config/db_router.py @@ -0,0 +1,70 @@ +""" +MSA Database Router + +각 서비스(앱)별로 독립적인 DB를 사용하도록 라우팅합니다. +- vehicles 앱 → vehicles DB +- detections 앱 → detections DB +- notifications 앱 → notifications DB +- 기타 (auth, admin 등) → default DB +""" + + +class MSADatabaseRouter: + """ + MSA 환경을 위한 Database Router + 각 앱을 해당 데이터베이스로 라우팅합니다. + """ + + # 앱별 DB 매핑 + APP_DB_MAPPING = { + "vehicles": "vehicles_db", + "detections": "detections_db", + "notifications": "notifications_db", + } + + def _get_db_for_app(self, app_label): + """앱 라벨에 해당하는 DB 반환""" + return self.APP_DB_MAPPING.get(app_label, "default") + + def db_for_read(self, model, **hints): + """ + 읽기 작업을 위한 DB 선택 + """ + return self._get_db_for_app(model._meta.app_label) + + def db_for_write(self, model, **hints): + """ + 쓰기 작업을 위한 DB 선택 + """ + return self._get_db_for_app(model._meta.app_label) + + def allow_relation(self, obj1, obj2, **hints): + """ + 두 객체 간의 관계 허용 여부 + + MSA 원칙: 서비스 간 직접 FK 관계 불허. + 서비스 간 참조는 BigIntegerField ID Reference 패턴 사용. 
+ """ + db1 = self._get_db_for_app(obj1._meta.app_label) + db2 = self._get_db_for_app(obj2._meta.app_label) + + if db1 == db2: + return True + + return False + + def allow_migrate(self, db, app_label, model_name=None, **hints): + """ + 마이그레이션 실행 DB 결정 + """ + target_db = self._get_db_for_app(app_label) + + # 해당 앱의 타겟 DB와 현재 DB가 일치하면 마이그레이션 허용 + if target_db == db: + return True + + # default DB에는 Django 기본 앱들만 마이그레이션 + if db == "default": + return app_label not in self.APP_DB_MAPPING + + return False diff --git a/backend/__init__.py b/config/settings/__init__.py similarity index 100% rename from backend/__init__.py rename to config/settings/__init__.py diff --git a/config/settings/base.py b/config/settings/base.py new file mode 100644 index 00000000..7c325de6 --- /dev/null +++ b/config/settings/base.py @@ -0,0 +1,188 @@ +# config/settings/base.py +import os +from pathlib import Path + +import pymysql + +pymysql.install_as_MySQLdb() + +BASE_DIR = Path(__file__).resolve().parent.parent.parent + +SECRET_KEY = os.getenv("SECRET_KEY", "insecure-key-change-in-production") + +# GCS (Google Cloud Storage) 설정 +GCS_BUCKET_NAME = os.getenv("GCS_BUCKET_NAME", "your-bucket-name") + +# Firebase 설정 +FIREBASE_CREDENTIALS = os.getenv("FIREBASE_CREDENTIALS") + +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + # Third-party + "corsheaders", + "rest_framework", + "django_filters", + "drf_yasg", + "django_celery_results", + "django_prometheus", + # Apps + "apps.vehicles", + "apps.detections", + "apps.notifications", +] + +MIDDLEWARE = [ + "django_prometheus.middleware.PrometheusBeforeMiddleware", + "corsheaders.middleware.CorsMiddleware", + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + 
"django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django_prometheus.middleware.PrometheusAfterMiddleware", +] + +ROOT_URLCONF = "config.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "config.wsgi.application" + +# Internationalization +LANGUAGE_CODE = "ko-kr" +TIME_ZONE = "Asia/Seoul" +USE_I18N = True +USE_TZ = True + +# Static files +STATIC_URL = "/static/" +STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") + +MEDIA_URL = "/media/" +MEDIA_ROOT = os.path.join(BASE_DIR, "media") + +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +# REST Framework +REST_FRAMEWORK = { + "DEFAULT_FILTER_BACKENDS": [ + "django_filters.rest_framework.DjangoFilterBackend", + "rest_framework.filters.OrderingFilter", + ], + "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination", + "PAGE_SIZE": 20, +} + +# Swagger +SWAGGER_SETTINGS = { + "SECURITY_DEFINITIONS": { + "Bearer": { + "type": "apiKey", + "name": "Authorization", + "in": "header", + } + }, + "USE_SESSION_AUTH": False, +} + +# ================================================== +# Celery 설정 (config_from_object namespace="CELERY"로 자동 로딩) +# ================================================== +CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "amqp://sa:1234@rabbitmq:5672//") +CELERY_RESULT_BACKEND = "rpc://" +CELERY_ACCEPT_CONTENT = ["json"] +CELERY_TASK_SERIALIZER = "json" +CELERY_RESULT_SERIALIZER = "json" +CELERY_TIMEZONE = "Asia/Seoul" +CELERY_ENABLE_UTC = True + +# 안정성 설정 +CELERY_TASK_ACKS_LATE = True 
+CELERY_TASK_REJECT_ON_WORKER_LOST = True +CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True + +# Timeout 설정 +CELERY_TASK_TIME_LIMIT = 300 # 5분 +CELERY_TASK_SOFT_TIME_LIMIT = 240 # 4분 + +# Prefetch +CELERY_WORKER_PREFETCH_MULTIPLIER = 1 + +# Priority +CELERY_TASK_QUEUE_MAX_PRIORITY = 10 +CELERY_TASK_DEFAULT_PRIORITY = 5 + +# 로그 설정 +CELERY_WORKER_HIJACK_ROOT_LOGGER = False +CELERY_WORKER_REDIRECT_STDOUTS = False + +# Flower 관리자 계정 +CELERY_FLOWER_USER = os.getenv("CELERY_FLOWER_USER", "admin") +CELERY_FLOWER_PASSWORD = os.getenv("CELERY_FLOWER_PASSWORD", "admin") + +# ================================================== +# CORS 설정 +# ================================================== +CORS_ALLOWED_ORIGINS = os.getenv( + "CORS_ALLOWED_ORIGINS", "http://localhost:5173,http://localhost:3000" +).split(",") + +CORS_ALLOW_CREDENTIALS = True + +# ================================================== +# Logging 설정 +# ================================================== +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} [trace_id={otelTraceID} span_id={otelSpanID}] {message}", + "style": "{", + "defaults": {"otelTraceID": "0", "otelSpanID": "0"}, + }, + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "verbose", + }, + }, + "root": { + "handlers": ["console"], + "level": "INFO", + }, + "loggers": { + "django": { + "handlers": ["console"], + "level": "INFO", + "propagate": False, + }, + "celery": { + "handlers": ["console"], + "level": "INFO", + "propagate": False, + }, + }, +} diff --git a/config/settings/dev.py b/config/settings/dev.py new file mode 100644 index 00000000..425ca4ce --- /dev/null +++ b/config/settings/dev.py @@ -0,0 +1,89 @@ +# config/settings/dev.py +from dotenv import load_dotenv + +from .base import * + +# 환경변수 로드 +env_path = os.path.join(BASE_DIR, "backend.env") +if os.path.exists(env_path): + load_dotenv(env_path) + +DEBUG = True 
+ALLOWED_HOSTS = ["*"] + +# ================================================== +# MSA Database 설정 +# ================================================== +# 각 서비스별 독립 DB 사용 +# - default: Django 기본 테이블 (auth, admin, sessions 등) +# - vehicles_db: 차량 서비스 +# - detections_db: 감지 서비스 +# - notifications_db: 알림 서비스 +# ================================================== + +DB_HOST = os.getenv("DB_HOST", "mysql") +DB_PORT = int(os.getenv("DB_PORT", 3306)) +DB_USER = os.getenv("DB_USER", "sa") +DB_PASSWORD = os.getenv("DB_PASSWORD", "1234") + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.mysql", + "NAME": os.getenv("DB_NAME", "speedcam"), + "USER": DB_USER, + "PASSWORD": DB_PASSWORD, + "HOST": DB_HOST, + "PORT": DB_PORT, + "OPTIONS": { + "charset": "utf8mb4", + }, + }, + "vehicles_db": { + "ENGINE": "django.db.backends.mysql", + "NAME": os.getenv("DB_NAME_VEHICLES", "speedcam_vehicles"), + "USER": DB_USER, + "PASSWORD": DB_PASSWORD, + "HOST": DB_HOST, + "PORT": DB_PORT, + "OPTIONS": { + "charset": "utf8mb4", + }, + }, + "detections_db": { + "ENGINE": "django.db.backends.mysql", + "NAME": os.getenv("DB_NAME_DETECTIONS", "speedcam_detections"), + "USER": DB_USER, + "PASSWORD": DB_PASSWORD, + "HOST": DB_HOST, + "PORT": DB_PORT, + "OPTIONS": { + "charset": "utf8mb4", + }, + }, + "notifications_db": { + "ENGINE": "django.db.backends.mysql", + "NAME": os.getenv("DB_NAME_NOTIFICATIONS", "speedcam_notifications"), + "USER": DB_USER, + "PASSWORD": DB_PASSWORD, + "HOST": DB_HOST, + "PORT": DB_PORT, + "OPTIONS": { + "charset": "utf8mb4", + }, + }, +} + +# ================================================== +# Database Router 설정 +# ================================================== +DATABASE_ROUTERS = ["config.db_router.MSADatabaseRouter"] + +# CORS +CORS_ORIGIN_ALLOW_ALL = True + +# Celery (개발용 설정 오버라이드) +CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "amqp://sa:1234@rabbitmq:5672//") + +# 로깅 레벨 +LOGGING["root"]["level"] = "DEBUG" +LOGGING["loggers"]["django"]["level"] = 
# config/settings/prod.py
import os

from .base import *  # noqa: F401,F403 -- Django base settings

DEBUG = False

# BUG FIX: "".split(",") == [""], so an unset/blank ALLOWED_HOSTS env var
# used to inject a blank host entry. Filter and strip each entry instead;
# an empty env var now yields [] (fail closed).
ALLOWED_HOSTS = [
    host.strip()
    for host in os.getenv("ALLOWED_HOSTS", "").split(",")
    if host.strip()
]

# ==================================================
# MSA Database settings
# ==================================================
# One MySQL schema per service:
# - default:          Django core tables (auth, admin, sessions, ...)
# - vehicles_db:      vehicle service
# - detections_db:    detection service
# - notifications_db: notification service
DB_HOST = os.getenv("DB_HOST")
DB_PORT = int(os.getenv("DB_PORT", 3306))
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")


def _mysql_db(name_env: str, default_name: str) -> dict:
    """Build one MySQL connection dict; the schema NAME comes from *name_env*."""
    return {
        "ENGINE": "django.db.backends.mysql",
        "NAME": os.getenv(name_env, default_name),
        "USER": DB_USER,
        "PASSWORD": DB_PASSWORD,
        "HOST": DB_HOST,
        "PORT": DB_PORT,
        "OPTIONS": {
            "charset": "utf8mb4",
        },
    }


DATABASES = {
    "default": _mysql_db("DB_NAME", "speedcam"),
    "vehicles_db": _mysql_db("DB_NAME_VEHICLES", "speedcam_vehicles"),
    "detections_db": _mysql_db("DB_NAME_DETECTIONS", "speedcam_detections"),
    "notifications_db": _mysql_db("DB_NAME_NOTIFICATIONS", "speedcam_notifications"),
}

# ==================================================
# Database router (routes each app's models to its service DB)
# ==================================================
DATABASE_ROUTERS = ["config.db_router.MSADatabaseRouter"]

# ==================================================
# CORS: comma-separated origin list; empty entries dropped
# ==================================================
_cors_origins = os.getenv("CORS_ALLOWED_ORIGINS", "")
CORS_ALLOWED_ORIGINS = [o for o in _cors_origins.split(",") if o]
"""
URL configuration for speedcam project.
"""

from django.contrib import admin
from django.http import JsonResponse
from django.urls import include, path, re_path
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from rest_framework.permissions import AllowAny


def home(request):
    """API home endpoint: report service identity and the main entry URLs."""
    return JsonResponse(
        {
            "service": "SpeedCam API",
            "version": "v1",
            "status": "running",
            "endpoints": {
                "swagger": "/swagger/",
                "redoc": "/redoc/",
                "admin": "/admin/",
                "api_v1": "/api/v1/",
            },
        }
    )


def health(request):
    """Health-check endpoint that also verifies external dependencies.

    Checks every configured database connection plus the RabbitMQ broker
    used by Celery; returns 200 when all checks pass, 503 otherwise.
    """
    checks = {}

    # DB connectivity: ensure_connection() opens (or reuses) a real connection.
    from django.db import connections

    for db_name in connections:
        try:
            connections[db_name].ensure_connection()
            checks[db_name] = "ok"
        except Exception as e:
            checks[db_name] = f"error: {e}"

    # RabbitMQ connectivity via the Celery broker connection.
    try:
        from config.celery import app as celery_app

        conn = celery_app.connection()
        conn.ensure_connection(max_retries=1, timeout=3)
        conn.close()
        checks["rabbitmq"] = "ok"
    except Exception as e:
        checks["rabbitmq"] = f"error: {e}"

    is_healthy = all(v == "ok" for v in checks.values())
    status_code = 200 if is_healthy else 503

    return JsonResponse(
        {"status": "healthy" if is_healthy else "unhealthy", "checks": checks},
        status=status_code,
    )


schema_view = get_schema_view(
    openapi.Info(
        title="SpeedCam API",
        default_version="v1",
        description="과속 차량 감지 및 알림 시스템 API",
        contact=openapi.Contact(email="admin@speedcam.local"),
    ),
    public=True,
    permission_classes=[AllowAny],
)

urlpatterns = [
    # Home & health check
    path("", home, name="home"),
    path("health/", health, name="health"),
    # Admin
    path("admin/", admin.site.urls),
    # API documentation.
    # BUG FIX: the format suffix group must be *named* -- `(?P<format>...)`.
    # The bare `(?P` form does not compile under `re`, and drf-yasg's
    # without_ui view expects a `format` kwarg from the URL pattern.
    re_path(
        r"^swagger(?P<format>\.json|\.yaml)$",
        schema_view.without_ui(cache_timeout=0),
        name="schema-json",
    ),
    re_path(
        r"^swagger/$",
        schema_view.with_ui("swagger", cache_timeout=0),
        name="schema-swagger-ui",
    ),
    re_path(
        r"^redoc/$", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"
    ),
    # Prometheus metrics (django_prometheus provides /metrics under this prefix)
    path("", include("django_prometheus.urls")),
    # API v1
    path("api/v1/", include("apps.vehicles.urls")),
    path("api/v1/", include("apps.detections.urls")),
    path("api/v1/", include("apps.notifications.urls")),
]
"""Firebase Cloud Messaging Client"""

import logging
import os
from typing import Dict, List, Optional

logger = logging.getLogger(__name__)

# Tracks whether the Admin SDK has been initialised by this process.
_firebase_initialized = False


def initialize_firebase():
    """Initialise the Firebase Admin SDK exactly once per process."""
    global _firebase_initialized

    if _firebase_initialized:
        return

    # Deferred import so processes that never push notifications do not
    # need firebase_admin at module import time.
    import firebase_admin
    from firebase_admin import credentials

    if firebase_admin._apps:
        # The SDK was already initialised elsewhere; just record that.
        _firebase_initialized = True
        return

    # An explicit FIREBASE_CREDENTIALS path wins over default credentials.
    cred_path = os.getenv("FIREBASE_CREDENTIALS")
    if cred_path and os.path.exists(cred_path):
        firebase_admin.initialize_app(credentials.Certificate(cred_path))
        logger.info(f"Firebase initialized with credentials: {cred_path}")
    else:
        # Fall back to GOOGLE_APPLICATION_CREDENTIALS / ADC.
        firebase_admin.initialize_app()
        logger.info("Firebase initialized with default credentials")

    _firebase_initialized = True


class FCMClient:
    """Thin wrapper around firebase_admin.messaging send operations."""

    def __init__(self):
        initialize_firebase()

    def send_to_token(
        self, token: str, title: str, body: str, data: Optional[Dict[str, str]] = None
    ) -> str:
        """Send one notification to a single device token; returns message id."""
        from firebase_admin import messaging

        msg = messaging.Message(
            token=token,
            notification=messaging.Notification(title=title, body=body),
            data=data or {},
        )
        message_id = messaging.send(msg)
        logger.info(f"FCM sent to token: {message_id}")
        return message_id

    def send_to_tokens(
        self,
        tokens: List[str],
        title: str,
        body: str,
        data: Optional[Dict[str, str]] = None,
    ) -> Dict:
        """Send one notification to many tokens; returns a per-token report."""
        from firebase_admin import messaging

        batch = messaging.MulticastMessage(
            tokens=tokens,
            notification=messaging.Notification(title=title, body=body),
            data=data or {},
        )
        report = messaging.send_each_for_multicast(batch)

        responses = []
        # report.responses is ordered to match the input token list.
        for token, resp in zip(tokens, report.responses):
            if resp.success:
                responses.append(
                    {"token": token, "success": True, "message_id": resp.message_id}
                )
            else:
                responses.append(
                    {"token": token, "success": False, "error": str(resp.exception)}
                )

        logger.info(
            f"FCM multicast: {report.success_count} success, "
            f"{report.failure_count} failed"
        )
        return {
            "success_count": report.success_count,
            "failure_count": report.failure_count,
            "responses": responses,
        }

    def subscribe_to_topic(self, tokens: List[str], topic: str) -> Dict:
        """Subscribe the given device tokens to an FCM topic."""
        from firebase_admin import messaging

        result = messaging.subscribe_to_topic(tokens, topic)
        logger.info(
            f"FCM topic subscribe '{topic}': {result.success_count} success, "
            f"{result.failure_count} failed"
        )
        return {
            "success_count": result.success_count,
            "failure_count": result.failure_count,
        }

    def send_to_topic(
        self, topic: str, title: str, body: str, data: Optional[Dict[str, str]] = None
    ) -> str:
        """Broadcast one notification to every subscriber of *topic*."""
        from firebase_admin import messaging

        msg = messaging.Message(
            topic=topic,
            notification=messaging.Notification(title=title, body=body),
            data=data or {},
        )
        message_id = messaging.send(msg)
        logger.info(f"FCM sent to topic '{topic}': {message_id}")
        return message_id


# Lazily created process-wide client.
_fcm_client = None


def get_fcm_client() -> FCMClient:
    """Return the shared FCMClient, creating it on first use."""
    global _fcm_client
    if _fcm_client is None:
        _fcm_client = FCMClient()
    return _fcm_client


def send_push_notification(
    token: str, title: str, body: str, data: Optional[Dict[str, str]] = None
) -> str:
    """Convenience wrapper: send a push to one device token."""
    return get_fcm_client().send_to_token(token, title, body, data)


def subscribe_tokens_to_topic(tokens: List[str], topic: str) -> Dict:
    """Convenience wrapper: subscribe tokens to a topic."""
    return get_fcm_client().subscribe_to_topic(tokens, topic)


def send_topic_notification(
    topic: str, title: str, body: str, data: Optional[Dict[str, str]] = None
) -> str:
    """Convenience wrapper: send a push to a topic."""
    return get_fcm_client().send_to_topic(topic, title, body, data)
"""Google Cloud Storage Client"""

import logging
import os
from datetime import timedelta
from typing import Optional, Tuple

logger = logging.getLogger(__name__)


class GCSClient:
    """Singleton wrapper around google-cloud-storage.

    The heavy ``google.cloud.storage`` import is deferred to first use so
    that processes which never touch GCS do not need the dependency at
    module import time (mirrors the lazy firebase_admin imports in
    core/firebase/fcm.py).
    """

    _instance = None
    _client = None

    def __new__(cls):
        # Classic singleton: every instantiation returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @property
    def client(self):
        """Lazily created ``storage.Client`` shared by all callers."""
        if self._client is None:
            from google.cloud import storage  # deferred: see class docstring

            self._client = storage.Client()
        return self._client

    @staticmethod
    def _parse_gcs_uri(gcs_uri: str) -> Tuple[str, str]:
        """Split ``gs://bucket/path/to/blob`` into ``(bucket, blob_path)``.

        Raises:
            ValueError: if *gcs_uri* does not start with ``gs://``.
        """
        if not gcs_uri.startswith("gs://"):
            raise ValueError(f"Not a GCS URI: {gcs_uri!r}")
        # removeprefix strips only the leading scheme; str.replace would
        # also mangle any literal 'gs://' occurring later in the path.
        parts = gcs_uri.removeprefix("gs://").split("/", 1)
        bucket_name = parts[0]
        blob_path = parts[1] if len(parts) > 1 else ""
        return bucket_name, blob_path

    def get_bucket(self, bucket_name: Optional[str] = None):
        """Return a bucket handle; falls back to $GCS_BUCKET_NAME."""
        bucket_name = bucket_name or os.getenv("GCS_BUCKET_NAME")
        if not bucket_name:
            raise ValueError("GCS_BUCKET_NAME is not set")
        return self.client.bucket(bucket_name)

    def download_as_bytes(self, gcs_uri: str) -> bytes:
        """Download the object at *gcs_uri* and return its raw bytes."""
        bucket_name, blob_path = self._parse_gcs_uri(gcs_uri)
        bucket = self.client.bucket(bucket_name)
        blob = bucket.blob(blob_path)

        logger.debug(f"Downloading from GCS: {gcs_uri}")
        return blob.download_as_bytes()

    def upload_from_bytes(
        self,
        data: bytes,
        blob_path: str,
        bucket_name: Optional[str] = None,
        content_type: str = "image/jpeg",
    ) -> str:
        """Upload raw bytes to GCS and return the resulting gs:// URI."""
        bucket = self.get_bucket(bucket_name)
        blob = bucket.blob(blob_path)
        blob.upload_from_string(data, content_type=content_type)

        gcs_uri = f"gs://{bucket.name}/{blob_path}"
        logger.debug(f"Uploaded to GCS: {gcs_uri}")
        return gcs_uri

    def get_signed_url(
        self, blob_path: str, bucket_name: Optional[str] = None, expiration: int = 3600
    ) -> str:
        """Generate a time-limited signed GET URL for *blob_path*."""
        bucket = self.get_bucket(bucket_name)
        blob = bucket.blob(blob_path)

        return blob.generate_signed_url(
            expiration=timedelta(seconds=expiration), method="GET"
        )


# Module-level cache; GCSClient is itself a singleton, but this avoids
# re-running __new__ on every convenience call.
_gcs_client = None


def get_gcs_client() -> GCSClient:
    """Return the shared GCSClient, creating it on first use."""
    global _gcs_client
    if _gcs_client is None:
        _gcs_client = GCSClient()
    return _gcs_client


def download_image(gcs_uri: str) -> bytes:
    """Download an image from GCS."""
    return get_gcs_client().download_as_bytes(gcs_uri)


def upload_image(data: bytes, blob_path: str) -> str:
    """Upload an image to GCS."""
    return get_gcs_client().upload_from_bytes(data, blob_path)
"""MQTT Subscriber for Edge Device messages"""

import json
import logging
import os

import paho.mqtt.client as mqtt
from django.utils import timezone
from django.utils.dateparse import parse_datetime

logger = logging.getLogger(__name__)


class MQTTSubscriber:
    """
    Receives Edge Device messages through the RabbitMQ MQTT plugin.

    Flow:
        1. Raspberry Pi publishes to MQTT (detections/new)
        2. RabbitMQ MQTT plugin converts/forwards internally
        3. This subscriber receives the message
        4. A pending Detection row is created, then an OCR task is dispatched
    """

    def __init__(self):
        self.client = mqtt.Client(
            callback_api_version=mqtt.CallbackAPIVersion.VERSION2,
            protocol=mqtt.MQTTv311,
            client_id=f"django-main-{os.getpid()}",
        )
        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message
        self.client.on_disconnect = self.on_disconnect

        # Broker credentials (same account used for AMQP).
        self.client.username_pw_set(
            os.getenv("MQTT_USER", "sa"), os.getenv("MQTT_PASS", "1234")
        )

    def on_connect(self, client, userdata, flags, reason_code, properties):
        """Subscribe to the detection topic once the broker accepts us."""
        if reason_code.is_failure:
            logger.error(f"MQTT connection failed: {reason_code}")
            return
        logger.info("Connected to MQTT broker")
        client.subscribe("detections/new", qos=1)

    def on_disconnect(
        self, client, userdata, disconnect_flags, reason_code, properties
    ):
        """Log unexpected drops; paho's network loop handles reconnection."""
        if reason_code.is_failure:
            logger.warning(
                f"Unexpected MQTT disconnect: {reason_code}, reconnecting..."
            )

    def on_message(self, client, userdata, msg):
        """
        Handle one inbound detection message:
        1. persist a pending Detection row immediately (prevents data loss),
        2. dispatch the OCR task.
        """
        try:
            payload = json.loads(msg.payload.decode())
            logger.info(f"Received MQTT message: {payload.get('camera_id')}")

            # Imported here to avoid circular imports at module load time.
            from apps.detections.models import Detection
            from tasks.ocr_tasks import process_ocr

            # Step 1: pending row in the detections service DB.
            detection = Detection.objects.using("detections_db").create(
                camera_id=payload.get("camera_id"),
                location=payload.get("location"),
                detected_speed=payload["detected_speed"],
                speed_limit=payload.get("speed_limit", 60.0),
                detected_at=self._parse_detected_at(payload.get("detected_at")),
                image_gcs_uri=payload["image_gcs_uri"],
                status="pending",
            )
            logger.info(f"Detection {detection.id} created (pending)")

            # Step 2: hand off to the OCR worker via Celery (AMQP).
            process_ocr.apply_async(
                args=[detection.id],
                kwargs={"gcs_uri": payload["image_gcs_uri"]},
                queue="ocr_queue",
                priority=5,
            )
            logger.info(f"OCR task dispatched for detection {detection.id}")

        except json.JSONDecodeError as e:
            logger.error(f"Invalid JSON in MQTT message: {e}")
        except KeyError as e:
            logger.error(f"Missing required field in MQTT message: {e}")
        except Exception as e:
            logger.error(f"Error processing MQTT message: {e}")

    @staticmethod
    def _parse_detected_at(value):
        """Coerce the payload's detected_at into an aware datetime."""
        if value is None:
            return timezone.now()
        if not isinstance(value, str):
            # Already a datetime (or something the model layer will reject).
            return value
        parsed = parse_datetime(value)
        if parsed is None:
            logger.warning(
                f"Invalid detected_at format: {value}, using current time"
            )
            return timezone.now()
        return timezone.make_aware(parsed) if timezone.is_naive(parsed) else parsed

    def start(self):
        """Connect to the broker and block on the network loop."""
        host = os.getenv("RABBITMQ_HOST", "rabbitmq")
        port = int(os.getenv("MQTT_PORT", 1883))

        logger.info(f"Connecting to MQTT broker at {host}:{port}")
        try:
            self.client.connect(host, port, keepalive=60)
            self.client.loop_forever()
        except Exception as e:
            logger.error(f"Failed to connect to MQTT broker: {e}")
            raise

    def stop(self):
        """Disconnect cleanly from the broker."""
        self.client.disconnect()
        logger.info("MQTT Subscriber stopped")


def start_mqtt_subscriber(blocking=True):
    """
    Start the MQTT subscriber.

    Args:
        blocking: run in the current thread (True) or in a daemon
            background thread (False).
    """
    subscriber = MQTTSubscriber()
    if blocking:
        subscriber.start()
        return subscriber

    import threading

    threading.Thread(target=subscriber.start, daemon=True).start()
    logger.info("MQTT Subscriber started in background thread")
    return subscriber
diff --git a/credentials/README.md b/credentials/README.md new file mode 100644 index 00000000..beaf23d2 --- /dev/null +++ b/credentials/README.md @@ -0,0 +1,28 @@ +# Credentials 폴더 + +이 폴더에는 GCP 및 Firebase 인증 관련 파일들이 위치합니다. + +## 파일 목록 + +| 파일명 | 용도 | 환경변수 | +|--------|------|----------| +| `firebase-service-account.json` | FCM 푸시 알림 | `FIREBASE_CREDENTIALS` | +| `gcp-cloud-storage.json` | GCS 이미지 저장소 | `GOOGLE_APPLICATION_CREDENTIALS` | + +## 설정 방법 + +### 1. Firebase Service Account +1. [Firebase Console](https://console.firebase.google.com/) 접속 +2. 프로젝트 설정 → 서비스 계정 → 새 비공개 키 생성 +3. 다운로드한 JSON 파일을 `firebase-service-account.json`으로 저장 + +### 2. GCP Service Account (Cloud Storage) +1. [GCP Console](https://console.cloud.google.com/) 접속 +2. IAM 및 관리자 → 서비스 계정 → 키 생성 +3. 필요한 역할: `Storage Object Viewer`, `Storage Object Creator` +4. 다운로드한 JSON 파일을 `gcp-cloud-storage.json`으로 저장 + +## 주의사항 + +- ⚠️ **실제 인증 파일은 절대 Git에 커밋하지 마세요** +- Docker 환경에서는 볼륨 마운트 또는 Secret Manager 사용 권장 diff --git a/docker-compose.app.yml b/docker-compose.app.yml deleted file mode 100644 index adfd9c2e..00000000 --- a/docker-compose.app.yml +++ /dev/null @@ -1,30 +0,0 @@ -services: - mysqldb: - image: mysql:latest - container_name: mysqldb - environment: - MYSQL_USER : ${MYSQL_USER} - MYSQL_PASSWORD : ${MYSQL_PASSWORD} - MYSQL_DATABASE : ${MYSQL_DATABASE} - MYSQL_ROOT_PASSWORD : ${MYSQL_ROOT_PASSWORD} - ports: - - "3306:3306" - - backend: - image: ${DOCKER_USERNAME}/django-backend:latest - build: - dockerfile: Dockerfile-prod - container_name: backend - ports: - - "8000:8000" - volumes: - - ./:/app - restart: always - depends_on: - - mysqldb - environment: - - DJANGO_SETTINGS_MODULE=backend.settings.prod - command: > - bash -c "python wait_mysql.py && - python manage.py migrate && - exec gunicorn config.wsgi:application --bind 0.0.0.0:8000 --workers=4 --threads=2" \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 5957f22f..00000000 --- 
a/docker-compose.yml +++ /dev/null @@ -1,28 +0,0 @@ -services: - mysqldb: - image: mysql:latest - container_name: mysqldb - environment: - MYSQL_USER : sa - MYSQL_PASSWORD : 1234 - MYSQL_DATABASE : capstone - MYSQL_ROOT_PASSWORD : 1234 - ports: - - "3306:3306" - - backend: - build: - dockerfile: Dockerfile - container_name: backend - ports: - - "8000:8000" - volumes: - - ./:/app - restart: always - depends_on: - - mysqldb - command: | - bash -c "python wait_mysql.py && - python manage.py makemigrations && - python manage.py migrate && - python manage.py runserver 0.0.0.0:8000" \ No newline at end of file diff --git a/docker/Dockerfile.alert b/docker/Dockerfile.alert new file mode 100644 index 00000000..2dd52ae3 --- /dev/null +++ b/docker/Dockerfile.alert @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# 시스템 의존성 +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* + +# 의존성 설치 +COPY requirements/base.txt requirements/alert.txt ./requirements/ +RUN pip install --no-cache-dir -r requirements/alert.txt + +# 앱 복사 +COPY . . + +# 스크립트 실행 권한 +RUN chmod +x scripts/*.sh + +CMD ["sh", "scripts/start_alert_worker.sh"] + diff --git a/docker/Dockerfile.main b/docker/Dockerfile.main new file mode 100644 index 00000000..1ab343d0 --- /dev/null +++ b/docker/Dockerfile.main @@ -0,0 +1,25 @@ +FROM python:3.12-slim + +WORKDIR /app + +# 시스템 의존성 +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* + +# 의존성 설치 +COPY requirements/base.txt requirements/main.txt ./requirements/ +RUN pip install --no-cache-dir -r requirements/main.txt + +# 앱 복사 +COPY . . 
+ +# 스크립트 실행 권한 +RUN chmod +x scripts/*.sh + +EXPOSE 8000 + +CMD ["sh", "scripts/start_main.sh"] + diff --git a/docker/Dockerfile.ocr b/docker/Dockerfile.ocr new file mode 100644 index 00000000..1c23bae0 --- /dev/null +++ b/docker/Dockerfile.ocr @@ -0,0 +1,28 @@ +FROM python:3.12-slim + +WORKDIR /app + +# 시스템 의존성 (OpenCV) +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + pkg-config \ + libgl1 \ + libglib2.0-0 \ + libsm6 \ + libxext6 \ + libxrender1 \ + && rm -rf /var/lib/apt/lists/* + +# 의존성 설치 +COPY requirements/base.txt requirements/ocr.txt ./requirements/ +RUN pip install --no-cache-dir -r requirements/ocr.txt + +# 앱 복사 +COPY . . + +# 스크립트 실행 권한 +RUN chmod +x scripts/*.sh + +CMD ["sh", "scripts/start_ocr_worker.sh"] + diff --git a/docker/docker-compose.monitoring.yml b/docker/docker-compose.monitoring.yml new file mode 100644 index 00000000..03d74d77 --- /dev/null +++ b/docker/docker-compose.monitoring.yml @@ -0,0 +1,170 @@ +services: + # =========================================== + # OpenTelemetry Collector + # =========================================== + otel-collector: + image: otel/opentelemetry-collector-contrib:0.98.0 + container_name: speedcam-otel-collector + command: ["--config", "/etc/otel-collector-config.yml"] + volumes: + - ./monitoring/otel-collector/otel-collector-config.yml:/etc/otel-collector-config.yml:ro + ports: + - "4317:4317" # OTLP gRPC + - "4318:4318" # OTLP HTTP + - "8889:8889" # Prometheus exporter + networks: + - speedcam-network + + # =========================================== + # Jaeger (Distributed Tracing) + # =========================================== + jaeger: + image: jaegertracing/all-in-one:1.57 + container_name: speedcam-jaeger + environment: + - COLLECTOR_OTLP_ENABLED=true + ports: + - "16686:16686" # Jaeger UI + - "14250:14250" # gRPC (collector) + networks: + - speedcam-network + + # =========================================== + # Prometheus (Metrics) + # 
=========================================== + prometheus: + image: prom/prometheus:v2.51.2 + container_name: speedcam-prometheus + volumes: + - ./monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro + - ./monitoring/prometheus/data:/prometheus + ports: + - "9090:9090" + command: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.retention.time=15d" + - "--web.enable-remote-write-receiver" + networks: + - speedcam-network + + # =========================================== + # Grafana (Dashboards) + # =========================================== + grafana: + image: grafana/grafana:10.4.2 + container_name: speedcam-grafana + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=admin + - GF_USERS_ALLOW_SIGN_UP=false + volumes: + - ./monitoring/grafana/provisioning:/etc/grafana/provisioning:ro + - ./monitoring/grafana/data:/var/lib/grafana + ports: + - "3000:3000" + depends_on: + - prometheus + - jaeger + - loki + networks: + - speedcam-network + + # =========================================== + # Loki (Log Aggregation) + # =========================================== + loki: + image: grafana/loki:2.9.6 + container_name: speedcam-loki + volumes: + - ./monitoring/loki/loki-config.yml:/etc/loki/local-config.yaml:ro + - ./monitoring/loki/data:/loki + ports: + - "3100:3100" + command: -config.file=/etc/loki/local-config.yaml + networks: + - speedcam-network + + # =========================================== + # Promtail (Log Shipper -> Loki) + # =========================================== + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + volumes: + - ./monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + depends_on: + - loki + networks: + - speedcam-network + + # =========================================== + # 
MySQL Exporter + # =========================================== + mysqld-exporter: + image: prom/mysqld-exporter:v0.15.1 + container_name: speedcam-mysqld-exporter + volumes: + - ./monitoring/mysqld-exporter/.my.cnf:/cfg/.my.cnf:ro + command: + - "--config.my-cnf=/cfg/.my.cnf" + ports: + - "9104:9104" + restart: unless-stopped + networks: + - speedcam-network + + # =========================================== + # Celery Exporter (Queue/Task Metrics) + # =========================================== + celery-exporter: + image: danihodovic/celery-exporter:0.10.3 + container_name: speedcam-celery-exporter + environment: + CE_BROKER_URL: "amqp://sa:1234@rabbitmq:5672//" + ports: + - "9808:9808" + restart: unless-stopped + networks: + - speedcam-network + + # =========================================== + # cAdvisor (Container Metrics) + # =========================================== + cadvisor: + image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + profiles: + - linux + volumes: + - /:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro + ports: + - "8080:8080" + networks: + - speedcam-network + + # =========================================== + # K6 Load Test Runner (on-demand) + # =========================================== + k6: + image: grafana/k6:latest + container_name: speedcam-k6 + profiles: + - loadtest + volumes: + - ./k6:/scripts + environment: + K6_PROMETHEUS_RW_SERVER_URL: http://prometheus:9090/api/v1/write + K6_PROMETHEUS_RW_TREND_AS_NATIVE_HISTOGRAM: "true" + MAIN_SERVICE_URL: http://main:8000 + networks: + - speedcam-network + +networks: + speedcam-network: + external: true diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 00000000..94270e47 --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,120 @@ +services: + # =========================================== + # Infrastructure Services + # =========================================== + mysql: + 
image: mysql:8.0 + container_name: speedcam-mysql + env_file: + - ../mysql.env + ports: + - "3306:3306" + volumes: + - mysql_data:/var/lib/mysql + - ./mysql/init.sql:/docker-entrypoint-initdb.d/init.sql:ro + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "sa", "-p1234"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - speedcam-network + + rabbitmq: + image: rabbitmq:3.13-management + container_name: speedcam-rabbitmq + env_file: + - ../rabbitmq.env + ports: + - "5672:5672" + - "1883:1883" + - "15672:15672" + - "15692:15692" + volumes: + - rabbitmq_data:/var/lib/rabbitmq + command: > + bash -c "rabbitmq-plugins enable --offline rabbitmq_mqtt rabbitmq_prometheus && + rabbitmq-server" + healthcheck: + test: ["CMD", "rabbitmq-diagnostics", "check_running"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - speedcam-network + + # =========================================== + # Application Services + # =========================================== + main: + build: + context: .. + dockerfile: docker/Dockerfile.main + container_name: speedcam-main + env_file: + - ../backend.env + ports: + - "8000:8000" + volumes: + - ../credentials:/app/credentials:ro + depends_on: + mysql: + condition: service_healthy + rabbitmq: + condition: service_healthy + networks: + - speedcam-network + + ocr-worker: + build: + context: .. + dockerfile: docker/Dockerfile.ocr + container_name: speedcam-ocr + env_file: + - ../backend.env + volumes: + - ../credentials:/app/credentials:ro + depends_on: + - main + - rabbitmq + networks: + - speedcam-network + + alert-worker: + build: + context: .. + dockerfile: docker/Dockerfile.alert + container_name: speedcam-alert + env_file: + - ../backend.env + volumes: + - ../credentials:/app/credentials:ro + depends_on: + - main + - rabbitmq + networks: + - speedcam-network + + flower: + build: + context: .. 
+ dockerfile: docker/Dockerfile.main + container_name: speedcam-flower + env_file: + - ../backend.env + command: celery -A config flower --port=5555 + ports: + - "5555:5555" + depends_on: + - rabbitmq + networks: + - speedcam-network + +volumes: + mysql_data: + rabbitmq_data: + +networks: + speedcam-network: + name: speedcam-network + driver: bridge diff --git a/docker/k6/load-test.js b/docker/k6/load-test.js new file mode 100644 index 00000000..999909b9 --- /dev/null +++ b/docker/k6/load-test.js @@ -0,0 +1,115 @@ +import http from 'k6/http'; +import { check, group, sleep } from 'k6'; +import { Rate, Trend } from 'k6/metrics'; + +// Custom metrics +const errorRate = new Rate('errors'); +const vehicleCreateDuration = new Trend('vehicle_create_duration', true); + +const BASE_URL = __ENV.MAIN_SERVICE_URL || 'http://main:8000'; + +export const options = { + scenarios: { + // Scenario 1: Smoke test (basic connectivity) + smoke: { + executor: 'constant-vus', + vus: 1, + duration: '10s', + startTime: '0s', + tags: { scenario: 'smoke' }, + }, + // Scenario 2: Average load + average_load: { + executor: 'ramping-vus', + startVUs: 0, + stages: [ + { duration: '30s', target: 10 }, // ramp up + { duration: '1m', target: 10 }, // steady + { duration: '10s', target: 0 }, // ramp down + ], + startTime: '15s', + tags: { scenario: 'average_load' }, + }, + // Scenario 3: Spike test + spike: { + executor: 'ramping-vus', + startVUs: 0, + stages: [ + { duration: '5s', target: 30 }, // spike up + { duration: '15s', target: 30 }, // hold spike + { duration: '5s', target: 0 }, // recover + ], + startTime: '2m', + tags: { scenario: 'spike' }, + }, + }, + thresholds: { + http_req_duration: ['p(95)<500'], // 95% of requests under 500ms + errors: ['rate<0.1'], // error rate under 10% + }, +}; + +// Helper to generate random Korean plate number +function randomPlate() { + const nums1 = Math.floor(Math.random() * 900) + 100; + const chars = '가나다라마바사아자차카타파하'; + const char = 
chars.charAt(Math.floor(Math.random() * chars.length)); + const nums2 = Math.floor(Math.random() * 9000) + 1000; + return `${nums1}${char}${nums2}`; +} + +export default function () { + group('Health Check', function () { + const res = http.get(`${BASE_URL}/health/`); + check(res, { + 'health status 200': (r) => r.status === 200, + 'health is healthy': (r) => r.json('status') === 'healthy', + }); + errorRate.add(res.status !== 200); + }); + + group('Vehicle CRUD', function () { + // Create + const plate = randomPlate(); + const createPayload = JSON.stringify({ + plate_number: plate, + owner_name: `테스트유저_${__VU}`, + owner_phone: `010-${Math.floor(Math.random() * 9000) + 1000}-${Math.floor(Math.random() * 9000) + 1000}`, + }); + + const createRes = http.post(`${BASE_URL}/api/v1/vehicles/`, createPayload, { + headers: { 'Content-Type': 'application/json' }, + }); + + check(createRes, { + 'vehicle created 201': (r) => r.status === 201, + }); + errorRate.add(createRes.status !== 201); + vehicleCreateDuration.add(createRes.timings.duration); + + // List + const listRes = http.get(`${BASE_URL}/api/v1/vehicles/`); + check(listRes, { + 'vehicle list 200': (r) => r.status === 200, + }); + errorRate.add(listRes.status !== 200); + }); + + group('Detections Read', function () { + const res = http.get(`${BASE_URL}/api/v1/detections/`); + check(res, { + 'detections list 200': (r) => r.status === 200, + }); + errorRate.add(res.status !== 200); + }); + + group('Notifications Read', function () { + const res = http.get(`${BASE_URL}/api/v1/notifications/`); + check(res, { + 'notifications list 200': (r) => r.status === 200, + }); + errorRate.add(res.status !== 200); + }); + + sleep(1); +} diff --git a/docker/k6/mqtt-load-test.py b/docker/k6/mqtt-load-test.py new file mode 100644 index 00000000..89b836ea --- /dev/null +++ b/docker/k6/mqtt-load-test.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python3 +""" +MQTT Load Test - IoT Device Simulation + +Simulates Raspberry Pi cameras sending 
#!/usr/bin/env python3
"""
MQTT Load Test - IoT Device Simulation

Simulates Raspberry Pi cameras sending detection messages via MQTT.
Full pipeline: MQTT → Detection (pending) → OCR Worker → Alert Worker
"""

import argparse
import json
import os
import random
import threading
import time
from datetime import datetime, timedelta, timezone

# Broker connection settings, overridable via environment variables.
MQTT_HOST = os.getenv("MQTT_HOST", "rabbitmq")
MQTT_PORT = int(os.getenv("MQTT_PORT", "1883"))
MQTT_USER = os.getenv("MQTT_USER", "sa")
MQTT_PASS = os.getenv("MQTT_PASS", "1234")
TOPIC = "detections/new"

# Locations for realistic simulation
LOCATIONS = [
    "서울시 강남구 테헤란로",
    "서울시 서초구 반포대로",
    "서울시 송파구 올림픽로",
    "경기도 성남시 분당구 판교역로",
    "인천시 연수구 송도대로",
    "서울시 마포구 월드컵북로",
    "서울시 영등포구 여의대방로",
    "부산시 해운대구 해운대로",
]

CAMERA_IDS = [f"CAM-{str(i).zfill(3)}" for i in range(1, 21)]

# Shared counters mutated by worker threads.  Every read or write of the
# mutable fields must happen while holding stats_lock ("start_time" is set
# once before any worker starts, so unlocked reads of it are safe).
stats = {
    "published": 0,
    "failed": 0,
    "total_latency_ms": 0,
    "start_time": None,
}
stats_lock = threading.Lock()


def generate_message():
    """Return a JSON-encoded detection event for a random camera/location.

    The detected speed always exceeds the chosen speed limit by 5-50 km/h so
    every message represents a violation; timestamps are KST (UTC+9).
    """
    kst = timezone(timedelta(hours=9))
    speed_limit = random.choice([60.0, 80.0, 100.0, 110.0])
    detected_speed = speed_limit + random.uniform(5, 50)

    return json.dumps(
        {
            "camera_id": random.choice(CAMERA_IDS),
            "location": random.choice(LOCATIONS),
            "detected_speed": round(detected_speed, 1),
            "speed_limit": speed_limit,
            "detected_at": datetime.now(kst).isoformat(),
            "image_gcs_uri": (
                f"gs://speedcam-bucket/detections/"
                f"{int(time.time() * 1000)}-{random.randint(1000, 9999)}.jpg"
            ),
        }
    )


def publish_worker(worker_id, rate_per_sec, duration_sec):
    """Single worker thread that publishes MQTT messages.

    Connects one client, publishes at approximately `rate_per_sec` messages
    per second for `duration_sec` seconds, and folds results into `stats`.
    A connection failure is counted as one failure and ends the worker.
    """
    # Imported lazily so the module stays importable (e.g. for message
    # generation or stats inspection in tests) without paho-mqtt installed.
    import paho.mqtt.client as mqtt

    client = mqtt.Client(
        callback_api_version=mqtt.CallbackAPIVersion.VERSION2,
        protocol=mqtt.MQTTv311,
        client_id=f"loadtest-{worker_id}-{os.getpid()}",
    )
    client.username_pw_set(MQTT_USER, MQTT_PASS)

    try:
        client.connect(MQTT_HOST, MQTT_PORT, keepalive=60)
        client.loop_start()
    except Exception as e:
        print(f"[Worker-{worker_id}] Connection failed: {e}")
        with stats_lock:
            stats["failed"] += 1
        return

    interval = 1.0 / rate_per_sec if rate_per_sec > 0 else 1.0
    end_time = time.time() + duration_sec

    while time.time() < end_time:
        msg = generate_message()
        start = time.time()
        # NOTE: with qos=1, publish() only enqueues the message locally; the
        # measured latency is the enqueue time, not the broker's PUBACK.  Use
        # result.wait_for_publish() if end-to-end ack latency is ever needed.
        result = client.publish(TOPIC, msg, qos=1)

        if result.rc == mqtt.MQTT_ERR_SUCCESS:
            latency_ms = (time.time() - start) * 1000
            with stats_lock:
                stats["published"] += 1
                stats["total_latency_ms"] += latency_ms
        else:
            with stats_lock:
                stats["failed"] += 1

        # Pace the loop so this worker approximates the requested rate.
        elapsed = time.time() - start
        sleep_time = max(0, interval - elapsed)
        if sleep_time > 0:
            time.sleep(sleep_time)

    client.loop_stop()
    client.disconnect()


def print_stats():
    """Print a consistent snapshot of the aggregate statistics."""
    # Fix: copy the counters while holding the lock.  Previously this read
    # the dict unlocked while workers were mutating it, so the snapshot could
    # be inconsistent (e.g. "published" bumped but "total_latency_ms" not yet).
    with stats_lock:
        published = stats["published"]
        failed = stats["failed"]
        total_latency_ms = stats["total_latency_ms"]
    start_time = stats["start_time"]  # written once before workers start

    elapsed = time.time() - start_time
    total = published + failed
    rate = published / elapsed if elapsed > 0 else 0
    avg_latency = total_latency_ms / published if published > 0 else 0

    print(f"\n{'='*60}")
    print(f" Elapsed: {elapsed:.1f}s | Published: {published} | Failed: {failed}")
    print(f" Rate: {rate:.1f} msg/s | Avg Latency: {avg_latency:.2f}ms")
    print(f" Error Rate: {(failed/total*100) if total > 0 else 0:.2f}%")
    print(f"{'='*60}")


def run_load_test(workers, rate_per_worker, duration):
    """Run the load test with `workers` threads for `duration` seconds."""
    print("\n MQTT Load Test Starting")
    print(f" Host: {MQTT_HOST}:{MQTT_PORT}")
    print(f" Workers: {workers}")
    print(
        f" Rate: {rate_per_worker}/s per worker ({workers * rate_per_worker}/s total)"
    )
    print(f" Duration: {duration}s")
    print(f" Topic: {TOPIC}")
    print()

    stats["start_time"] = time.time()
    threads = []

    for i in range(workers):
        t = threading.Thread(
            target=publish_worker,
            args=(i, rate_per_worker, duration),
        )
        t.start()
        threads.append(t)

    # Print stats every ~5s.  Fix: never sleep past the end of the test
    # window (the old fixed 5s sleep could overshoot by up to 5 seconds).
    monitor_end = time.time() + duration
    while time.time() < monitor_end:
        time.sleep(min(5, max(0, monitor_end - time.time())))
        print_stats()

    for t in threads:
        t.join(timeout=10)

    print("\n FINAL RESULTS")
    print_stats()


def main():
    """Parse CLI arguments and launch the load test."""
    parser = argparse.ArgumentParser(description="MQTT Load Test")
    parser.add_argument(
        "--workers", type=int, default=5, help="Number of concurrent workers"
    )
    parser.add_argument(
        "--rate", type=int, default=2, help="Messages per second per worker"
    )
    parser.add_argument(
        "--duration", type=int, default=60, help="Test duration in seconds"
    )
    args = parser.parse_args()

    run_load_test(args.workers, args.rate, args.duration)


if __name__ == "__main__":
    main()
b/docker/monitoring/loki/loki-config.yml new file mode 100644 index 00000000..39c9a792 --- /dev/null +++ b/docker/monitoring/loki/loki-config.yml @@ -0,0 +1,37 @@ +auth_enabled: false + +server: + http_listen_port: 3100 + +common: + path_prefix: /loki + storage: + filesystem: + chunks_directory: /loki/chunks + rules_directory: /loki/rules + replication_factor: 1 + ring: + kvstore: + store: inmemory + +schema_config: + configs: + - from: "2024-01-01" + store: tsdb + object_store: filesystem + schema: v13 + index: + prefix: index_ + period: 24h + +limits_config: + retention_period: 168h # 7 days + max_global_streams_per_user: 10000 + ingestion_burst_size_mb: 16 + ingestion_rate_mb: 8 + +compactor: + working_directory: /loki/compactor + compaction_interval: 10m + retention_enabled: true + retention_delete_delay: 2h diff --git a/docker/monitoring/mysqld-exporter/.my.cnf b/docker/monitoring/mysqld-exporter/.my.cnf new file mode 100644 index 00000000..d3141809 --- /dev/null +++ b/docker/monitoring/mysqld-exporter/.my.cnf @@ -0,0 +1,5 @@ +[client] +user=sa +password=1234 +host=mysql +port=3306 diff --git a/docker/monitoring/otel-collector/otel-collector-config.yml b/docker/monitoring/otel-collector/otel-collector-config.yml new file mode 100644 index 00000000..56d22560 --- /dev/null +++ b/docker/monitoring/otel-collector/otel-collector-config.yml @@ -0,0 +1,39 @@ +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + +processors: + batch: + timeout: 5s + send_batch_size: 1024 + resource: + attributes: + - key: service.namespace + value: speedcam + action: upsert + +exporters: + otlp/jaeger: + endpoint: jaeger:4317 + tls: + insecure: true + prometheus: + endpoint: 0.0.0.0:8889 + namespace: speedcam + resource_to_telemetry_conversion: + enabled: true + +service: + pipelines: + traces: + receivers: [otlp] + processors: [batch, resource] + exporters: [otlp/jaeger] + metrics: + receivers: [otlp] + processors: [batch, resource] + 
exporters: [prometheus] diff --git a/docker/monitoring/prometheus/prometheus.yml b/docker/monitoring/prometheus/prometheus.yml new file mode 100644 index 00000000..39a18f97 --- /dev/null +++ b/docker/monitoring/prometheus/prometheus.yml @@ -0,0 +1,34 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + # --- Application --- + - job_name: "django" + metrics_path: /metrics + static_configs: + - targets: ["main:8000"] + + # --- OpenTelemetry Collector --- + - job_name: "otel-collector" + static_configs: + - targets: ["otel-collector:8889"] + + # --- Infrastructure --- + - job_name: "rabbitmq" + static_configs: + - targets: ["rabbitmq:15692"] + + - job_name: "mysql" + static_configs: + - targets: ["mysqld-exporter:9104"] + + # --- Workers --- + - job_name: "celery" + static_configs: + - targets: ["celery-exporter:9808"] + + # --- Container Resources --- + - job_name: "cadvisor" + static_configs: + - targets: ["cadvisor:8080"] diff --git a/docker/monitoring/promtail/promtail-config.yml b/docker/monitoring/promtail/promtail-config.yml new file mode 100644 index 00000000..4a649176 --- /dev/null +++ b/docker/monitoring/promtail/promtail-config.yml @@ -0,0 +1,31 @@ +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: http://loki:3100/loki/api/v1/push + +scrape_configs: + - job_name: docker + docker_sd_configs: + - host: unix:///var/run/docker.sock + refresh_interval: 5s + filters: + - name: name + values: + - "speedcam-.*" + relabel_configs: + - source_labels: ["__meta_docker_container_name"] + regex: "/(.*)" + target_label: "container" + - source_labels: ["__meta_docker_container_name"] + regex: "/speedcam-(.*)" + target_label: "service" + pipeline_stages: + - regex: + expression: ".*trace_id=(?P[a-fA-F0-9]+).*" + - labels: + trace_id: diff --git a/docker/mysql/init.sql b/docker/mysql/init.sql new file mode 100644 index 00000000..09a728cb --- /dev/null +++ 
b/docker/mysql/init.sql @@ -0,0 +1,23 @@ +-- MSA Database Initialization Script +-- 각 서비스별 독립 데이터베이스 생성 + +-- Default DB (Django 기본 - auth, admin, sessions) +CREATE DATABASE IF NOT EXISTS speedcam CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + +-- Vehicles Service DB +CREATE DATABASE IF NOT EXISTS speedcam_vehicles CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + +-- Detections Service DB +CREATE DATABASE IF NOT EXISTS speedcam_detections CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + +-- Notifications Service DB +CREATE DATABASE IF NOT EXISTS speedcam_notifications CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; + +-- Grant privileges to user +GRANT ALL PRIVILEGES ON speedcam.* TO 'sa'@'%'; +GRANT ALL PRIVILEGES ON speedcam_vehicles.* TO 'sa'@'%'; +GRANT ALL PRIVILEGES ON speedcam_detections.* TO 'sa'@'%'; +GRANT ALL PRIVILEGES ON speedcam_notifications.* TO 'sa'@'%'; + +FLUSH PRIVILEGES; + diff --git a/docs/ARCHITECTURE_COMPARISON.md b/docs/ARCHITECTURE_COMPARISON.md new file mode 100644 index 00000000..98e82f2d --- /dev/null +++ b/docs/ARCHITECTURE_COMPARISON.md @@ -0,0 +1,445 @@ +# SpeedCam Backend Architecture Evolution + +## 개요 + +이 문서는 SpeedCam 백엔드 시스템의 아키텍처 진화 과정을 설명합니다. 기존 아키텍처에서 발견된 구조적 한계점들과, 이를 해결하기 위해 Event Driven Architecture로 전환한 과정을 다룹니다. + +--- + +## 1. 기존 아키텍처의 한계 + +기존 아키텍처는 다음과 같은 구조를 가지고 있었습니다: + +```mermaid +graph TB + subgraph Edge["Edge Device (Raspberry Pi)"] + Camera["과속 카메라"] + end + + subgraph Backend["backend (Django)"] + API["API Handler"] + OCR["OCR 처리
(동기 실행)"] + end + + subgraph Workers["Celery Workers"] + CW["celery_worker
(알림 전송)"] + DLQ["celery_worker_dlq"] + end + + Camera -->|"HTTP POST"| API + API --> OCR + Backend --> RMQ["RabbitMQ"] + CW --> RMQ + Backend --> MySQL[("MySQL")] + CW --> MySQL + + style Backend fill:#ffcccc,stroke:#cc0000 + style OCR fill:#ff9999 +``` + +이 구조에서 다음과 같은 **4가지 핵심 문제**가 발생했습니다. + +--- + +### 1.1 문제 1: OCR 동기 처리로 인한 서버 처리량 저하 + +**문제 상황** + +Django 서버에서 OCR을 동기적으로 처리하면서, OCR 작업이 진행되는 동안 HTTP 스레드가 점유됩니다. OCR은 이미지에서 차량 번호판을 인식하는 CPU 집약적 작업으로, 건당 약 3초가 소요됩니다. + +```mermaid +sequenceDiagram + participant E1 as Edge Device 1 + participant E2 as Edge Device 2 + participant D as Django Server + + E1->>D: POST /detection (이미지) + activate D + Note over D: OCR 처리 중 (3초)
스레드 점유 + E2->>D: POST /detection (이미지) + Note over E2: ⏳ 대기... + D-->>E1: 200 OK + deactivate D + activate D + Note over D: OCR 처리 중 (3초) + D-->>E2: 200 OK + deactivate D +``` + +**발생하는 문제** + +| 지표 | 영향 | +|------|------| +| 서버 처리량 | 동시 요청 처리 불가, 순차 처리로 병목 발생 | +| 응답 시간 | 요청당 3초 이상 소요 | +| 리소스 효율 | API 서버가 OCR 연산에 리소스 소모 | + +--- + +### 1.2 문제 2: 느린 응답으로 인한 Edge Device 블로킹 + +**문제 상황** + +서버 응답이 3초 이상 걸리면서, Edge Device(Raspberry Pi) 측에서도 연쇄적인 문제가 발생합니다. HTTP 요청을 보낸 후 응답을 기다리는 동안 Edge Device의 스레드가 블로킹됩니다. + +```mermaid +sequenceDiagram + participant Camera as 카메라 모듈 + participant Pi as Raspberry Pi + participant Server as Django Server + + Camera->>Pi: 과속 차량 감지! + Pi->>Server: HTTP POST (이미지) + activate Pi + Note over Pi: ⚠️ 응답 대기 중
다음 감지 처리 불가 + activate Server + Note over Server: OCR 처리 (3초) + Server-->>Pi: 200 OK + deactivate Server + deactivate Pi + Note over Pi: 이제야 다음 감지 가능 +``` + +**발생하는 문제** + +| 지표 | 영향 | +|------|------| +| Edge 처리량 | 응답 대기 중 새로운 과속 차량 감지 불가 | +| 데이터 유실 | 대기 중 발생한 과속 이벤트 누락 가능 | +| 네트워크 비용 | HTTP 연결 유지 오버헤드 | + +--- + +### 1.3 문제 3: HTTP 기반 IoT 통신의 구조적 한계 + +**문제 상황** + +IoT 환경에서 HTTP는 적합하지 않은 프로토콜입니다: + +```mermaid +graph LR + subgraph Problems["HTTP 통신의 한계"] + P1["매 요청마다
TCP 핸드셰이크"] + P2["연결 유지
배터리 소모"] + P3["네트워크 단절 시
데이터 유실"] + P4["QoS 보장 없음"] + end + + Pi["Raspberry Pi"] -->|"HTTP POST"| Server["Django"] + + style Problems fill:#ffeeee +``` + +**발생하는 문제** + +| 한계 | 설명 | +|------|------| +| 연결 오버헤드 | 매 요청마다 새로운 TCP 연결 수립 필요 | +| 메시지 보장 없음 | 네트워크 불안정 시 데이터 유실, 재전송 로직 직접 구현 필요 | +| 단방향 통신 | 서버→Edge 방향 통신 어려움 (NAT/방화벽 문제) | +| 오프라인 처리 불가 | 네트워크 단절 시 버퍼링 메커니즘 없음 | + +--- + +### 1.4 문제 4: OCR 장애가 전체 서비스에 전파 + +**문제 상황** + +OCR이 Django 프로세스 내에서 실행되므로, OCR 관련 장애가 발생하면 API 서비스 전체에 영향을 미칩니다: + +```mermaid +graph TB + subgraph Backend["Django Server"] + API["API Endpoints
/vehicles, /users, ..."] + OCR["OCR 처리"] + end + + OCR -->|"장애 발생!"| API + API -->|"응답 불가"| Client["클라이언트"] + + style Backend fill:#ffcccc + style OCR fill:#ff6666 + style API fill:#ffaaaa +``` + +**발생하는 문제** + +| 장애 시나리오 | 영향 범위 | +|--------------|----------| +| OCR 라이브러리 메모리 누수 | Django 프로세스 전체 영향 | +| OCR 처리 무한 루프 | API 응답 불가 | +| OCR 의존성 충돌 | 서버 재시작 필요 | + +또한, OCR 처리량을 늘리기 위해서는 **Django 서버 전체를 스케일 아웃**해야 하는 비효율이 발생합니다. + +--- + +## 2. 새로운 아키텍처: Event Driven Architecture + +위 문제들을 해결하기 위해 **Event Driven Architecture**로 전환했습니다. + +```mermaid +graph TB + subgraph Edge["Edge Device"] + Camera["과속 카메라"] + end + + subgraph Main["main (Django)"] + API["API Handler"] + MQTT_Sub["MQTT Subscriber"] + Publisher["Event Publisher"] + end + + subgraph Workers["Event Processors"] + OCR["ocr-worker
• 감지 이벤트 처리
• OCR 수행"] + Alert["alert-worker
• 완료 이벤트 처리
• FCM 발송"] + end + + subgraph MessageBroker["RabbitMQ"] + MQTT["MQTT Plugin"] + Queue1[("감지 이벤트 큐")] + Queue2[("알림 이벤트 큐")] + end + + Camera -->|"MQTT Publish"| MQTT + MQTT --> MQTT_Sub + Publisher --> Queue1 + Queue1 --> OCR + OCR --> Queue2 + Queue2 --> Alert + + Main --> DB1[("default")] + Main --> DB2[("vehicles_db")] + OCR --> DB3[("detections_db")] + Alert --> DB4[("notifications_db")] + + style Main fill:#90EE90 + style OCR fill:#87CEEB + style Alert fill:#DDA0DD + style MessageBroker fill:#FFB6C1 +``` + +--- + +## 3. 문제 해결: Before → After + +### 3.1 해결 1: 비동기 이벤트 처리로 서버 처리량 극대화 + +**변경 내용** + +OCR 처리를 Django에서 분리하여 전용 Worker가 이벤트를 구독하고 처리합니다. + +```mermaid +sequenceDiagram + participant E1 as Edge Device 1 + participant E2 as Edge Device 2 + participant M as main (Django) + participant Q as RabbitMQ + participant O as ocr-worker + + E1->>M: MQTT (과속 감지) + M->>Q: 감지 이벤트 발행 + M-->>E1: ACK (즉시) + + E2->>M: MQTT (과속 감지) + M->>Q: 감지 이벤트 발행 + M-->>E2: ACK (즉시) + + Q->>O: 이벤트 1 수신 + Q->>O: 이벤트 2 수신 + Note over O: 병렬 OCR 처리 +``` + +**개선 효과** + +| 지표 | Before | After | +|------|--------|-------| +| 응답 시간 | 3초+ | **< 100ms** | +| 동시 처리 | 순차 처리 | **병렬 처리** | +| 서버 역할 | API + OCR | **API만 담당** | + +--- + +### 3.2 해결 2: 즉시 응답으로 Edge Device 해방 + +**변경 내용** + +서버가 이벤트를 큐에 발행하고 즉시 응답하므로, Edge Device는 블로킹 없이 다음 작업을 수행할 수 있습니다. + +```mermaid +sequenceDiagram + participant Camera as 카메라 모듈 + participant Pi as Raspberry Pi + participant M as main + participant Q as RabbitMQ + + Camera->>Pi: 과속 차량 #1 감지 + Pi->>M: MQTT Publish + M->>Q: 이벤트 발행 + M-->>Pi: ACK (즉시) + Note over Pi: ✅ 즉시 복귀 + + Camera->>Pi: 과속 차량 #2 감지 + Pi->>M: MQTT Publish + M-->>Pi: ACK (즉시) + Note over Pi: ✅ 연속 감지 가능 +``` + +**개선 효과** + +| 지표 | Before | After | +|------|--------|-------| +| Edge 블로킹 | 3초+ 대기 | **즉시 복귀** | +| 연속 감지 | 불가 | **가능** | +| 데이터 유실 | 대기 중 누락 | **큐에 보존** | + +--- + +### 3.3 해결 3: MQTT 프로토콜로 IoT 최적화 + +**변경 내용** + +HTTP 대신 IoT에 최적화된 MQTT 프로토콜을 사용합니다. 
RabbitMQ의 MQTT Plugin을 통해 MQTT와 AMQP를 통합합니다. + +```mermaid +graph TB + subgraph Edge["Edge Device"] + Pi["Raspberry Pi
• 영구 연결
• QoS 1 보장
• 오프라인 버퍼링"] + end + + subgraph Broker["RabbitMQ"] + MQTT["MQTT Plugin
Topic: detections/new"] + AMQP["AMQP Exchange"] + MQTT --> AMQP + end + + Pi -->|"MQTT (Port 1883)
경량 프로토콜"| MQTT + + style Pi fill:#90EE90 + style Broker fill:#FFB6C1 +``` + +**개선 효과** + +| 지표 | Before (HTTP) | After (MQTT) | +|------|---------------|--------------| +| 연결 방식 | 요청마다 연결 | **영구 연결** | +| 메시지 보장 | 없음 | **QoS 1 (At least once)** | +| 오프라인 처리 | 유실 | **브로커 버퍼링** | +| 양방향 통신 | 어려움 | **Subscribe 가능** | +| 프로토콜 오버헤드 | 높음 | **최소화** | + +--- + +### 3.4 해결 4: 완전한 장애 격리와 독립적 확장 + +**변경 내용** + +OCR을 별도 컨테이너로 분리하여 장애가 격리되고, 필요한 컴포넌트만 독립적으로 확장할 수 있습니다. + +```mermaid +graph TB + subgraph Isolation["장애 격리"] + Main["main
[정상]"] + OCR["ocr-worker
[장애 발생!]"] + Alert["alert-worker
[정상]"] + end + + Main --> Q[("RabbitMQ
이벤트 보존")] + OCR -.->|"장애"| Q + Alert --> Q + + Note1["API 서비스 정상 운영"] + Note2["이벤트는 큐에 보존
Worker 복구 시 자동 처리"] + + Main --> Note1 + Q --> Note2 + + style Main fill:#90EE90 + style OCR fill:#ffcccc + style Alert fill:#90EE90 +``` + +**확장 시나리오** + +OCR 처리량이 3배 필요한 경우: + +```mermaid +graph LR + subgraph Before["기존 방식"] + B1["Django x3
(전체 확장)"] + end + + subgraph After["새로운 방식"] + M["main x1"] + O["ocr-worker x3
(OCR만 확장)"] + A["alert-worker x1"] + end + + style B1 fill:#ffcccc + style O fill:#87CEEB +``` + +```bash +# OCR Worker만 확장 +docker-compose up -d --scale ocr-worker=3 +``` + +**개선 효과** + +| 지표 | Before | After | +|------|--------|-------| +| OCR 장애 영향 | API 전체 장애 | **OCR만 지연** | +| 진행 중 작업 | 유실 | **큐에 보존** | +| 확장 단위 | Django 전체 | **Worker별 독립** | +| 리소스 효율 | 낮음 | **필요한 것만 확장** | + +--- + +## 4. 아키텍처 전환 요약 + +### 4.1 이벤트 흐름 + +```mermaid +sequenceDiagram + participant Edge as Edge Device + participant Main as main + participant RMQ as RabbitMQ + participant OCR as ocr-worker + participant Alert as alert-worker + participant User as 사용자 앱 + + Edge->>Main: MQTT (과속 차량 감지) + Main->>Main: DB 저장 (pending) + Main->>RMQ: 과속 감지 이벤트 발행 + Main-->>Edge: ACK + + RMQ->>OCR: 감지 이벤트 수신 + OCR->>OCR: 번호판 OCR 처리 + OCR->>OCR: DB 업데이트 (completed) + OCR->>RMQ: OCR 완료 이벤트 발행 + + RMQ->>Alert: 완료 이벤트 수신 + Alert->>User: FCM Push 알림 +``` + +### 4.2 Before vs After 비교 + +| 문제 영역 | Before | After | +|----------|--------|-------| +| **OCR 처리** | Django 동기 (블로킹) | ocr-worker 비동기 | +| **응답 시간** | 3초+ | < 100ms | +| **IoT 프로토콜** | HTTP (오버헤드) | MQTT (경량, QoS) | +| **메시지 보장** | 없음 | At least once | +| **장애 격리** | 전체 영향 | 컴포넌트 격리 | +| **확장성** | 서버 전체 확장 | Worker별 독립 확장 | +| **데이터베이스** | 단일 DB | 서비스별 4개 DB | + +### 4.3 핵심 성과 + +**기존 아키텍처의 근본적 한계**였던 **OCR 동기 처리**를 제거하고, **Event Driven Architecture**로 전환함으로써: + +1. **서버 처리량 극대화**: API 서버는 이벤트 발행만 담당, OCR은 별도 Worker가 병렬 처리 +2. **Edge Device 효율화**: 즉시 응답으로 연속 감지 가능, 데이터 유실 방지 +3. **IoT 최적화**: MQTT 프로토콜로 경량화, 메시지 전달 보장, 오프라인 대응 +4. **운영 안정성**: 장애 격리, 독립적 확장, 이벤트 보존으로 시스템 복원력 확보 diff --git a/docs/DEPLOYMENT.md b/docs/DEPLOYMENT.md new file mode 100644 index 00000000..18cbdc5a --- /dev/null +++ b/docs/DEPLOYMENT.md @@ -0,0 +1,1468 @@ +# GCP 멀티 인스턴스 배포 가이드 + +## 목차 + +1. [사전 요구사항](#1-사전-요구사항) +2. [시스템 아키텍처](#2-시스템-아키텍처) +3. [GCP 인프라 설정](#3-gcp-인프라-설정) +4. [배포 디렉토리 구조](#4-배포-디렉토리-구조) +5. [Docker Compose 파일](#5-docker-compose-파일) +6. [설정 파일](#6-설정-파일) +7. 
[Docker 이미지 빌드 및 배포](#7-docker-이미지-빌드-및-배포) +8. [배포 순서](#8-배포-순서) +9. [배포 검증](#9-배포-검증) +10. [운영 가이드](#10-운영-가이드) +11. [트러블슈팅](#11-트러블슈팅) +12. [리소스 정리](#12-리소스-정리) + +--- + +## 1. 사전 요구사항 + +### 1.1 필수 도구 설치 + +| 도구 | 버전 | 설치 방법 | +|------|------|----------| +| Google Cloud SDK | 최신 | https://cloud.google.com/sdk/docs/install | +| Docker | 20.10+ | https://docs.docker.com/get-docker/ | +| Docker Compose | 2.0+ | Docker Desktop 포함 또는 별도 설치 | + +### 1.2 GCP 프로젝트 설정 + +```bash +# gcloud 초기화 및 로그인 +gcloud init + +# 프로젝트 확인 +gcloud config get-value project + +# 필수 API 활성화 +gcloud services enable compute.googleapis.com +gcloud services enable artifactregistry.googleapis.com +``` + +### 1.3 환경 변수 설정 + +```bash +# 프로젝트 설정 +export GCP_PROJECT_ID= +export GCP_REGION=asia-northeast3 +export GCP_ZONE=asia-northeast3-a + +# Docker Registry (Artifact Registry 사용) +export ARTIFACT_REGISTRY=${GCP_REGION}-docker.pkg.dev/${GCP_PROJECT_ID}/speedcam +``` + +--- + +## 2. 시스템 아키텍처 + +### 2.1 전체 아키텍처 + +```mermaid +graph TB + subgraph Internet + Client[Client/Browser] + Pi[Raspberry Pi
Edge Device] + end + + subgraph GCP["Google Cloud Platform (asia-northeast3)"] + subgraph VPC["VPC Network"] + subgraph Compute["GCE Instances"] + DB[speedcam-db
e2-highmem-2
MySQL + Exporters] + MQ[speedcam-mq
e2-medium
RabbitMQ] + App[speedcam-app
e2-medium
Django API + Flower] + OCR[speedcam-ocr
e2-standard-2
OCR Worker] + Alert[speedcam-alert
e2-small
Alert Worker] + Mon[speedcam-mon
e2-standard-2
Monitoring Stack] + end + end + + AR[Artifact Registry
speedcam] + GCS[Cloud Storage
Images] + end + + subgraph External["External Services"] + FCM[Firebase FCM] + end + + Client -->|HTTP :8000| App + Pi -->|MQTT :1883| MQ + Pi -->|Upload| GCS + + App -->|AMQP :5672| MQ + App -->|MySQL :3306| DB + + OCR -->|AMQP| MQ + OCR -->|MySQL| DB + OCR -->|Download| GCS + + Alert -->|AMQP| MQ + Alert -->|MySQL| DB + Alert -->|Push| FCM + + Mon -->|Scrape Metrics| DB + Mon -->|Scrape Metrics| MQ + Mon -->|Scrape Metrics| App + Mon -->|Scrape Metrics| OCR + Mon -->|Scrape Metrics| Alert + Mon -->|Collect Logs| DB + Mon -->|Collect Logs| MQ + Mon -->|Collect Logs| App + Mon -->|Collect Logs| OCR + Mon -->|Collect Logs| Alert + + AR -.->|Pull Image| App + AR -.->|Pull Image| OCR + AR -.->|Pull Image| Alert +``` + +### 2.2 메시지 흐름 + +```mermaid +sequenceDiagram + participant Pi as Raspberry Pi + participant GCS as Cloud Storage + participant RMQ as RabbitMQ + participant Main as Main Service + participant OCR as OCR Worker + participant Alert as Alert Worker + participant FCM as Firebase + + Note over Pi: 과속 차량 감지 + + Pi->>GCS: 1. 이미지 업로드 + Pi->>RMQ: 2. MQTT Publish (detections/new) + + RMQ->>Main: 3. MQTT Subscribe + Main->>Main: 4. Detection 생성 (pending) + Main->>RMQ: 5. AMQP Publish (ocr_queue) + + RMQ->>OCR: 6. Consume from ocr_queue + OCR->>GCS: 7. 이미지 다운로드 + OCR->>OCR: 8. EasyOCR 번호판 인식 + OCR->>OCR: 9. Detection 업데이트 (completed) + OCR->>RMQ: 10. AMQP Publish (fcm_queue) + + RMQ->>Alert: 11. Consume from fcm_queue + Alert->>FCM: 12. 푸시 알림 전송 + Alert->>Alert: 13. 
Notification 저장 +``` + +### 2.3 데이터베이스 구조 + +```mermaid +erDiagram + speedcam_vehicles { + bigint id PK + varchar plate_number UK + varchar owner_name + varchar owner_phone + varchar fcm_token + datetime created_at + datetime updated_at + } + + speedcam_detections { + bigint id PK + bigint vehicle_id "ID Reference" + float detected_speed + float speed_limit + varchar location + varchar camera_id + varchar image_gcs_uri + varchar ocr_result + float ocr_confidence + datetime detected_at + datetime processed_at + enum status + text error_message + datetime created_at + datetime updated_at + } + + speedcam_notifications { + bigint id PK + bigint detection_id "ID Reference" + varchar fcm_token + varchar title + text body + datetime sent_at + enum status + int retry_count + text error_message + datetime created_at + } + + speedcam_vehicles ||--o{ speedcam_detections : "vehicle_id" + speedcam_detections ||--o{ speedcam_notifications : "detection_id" +``` + +### 2.4 인스턴스 사양 + +| 인스턴스 이름 | 역할 | 머신 타입 | vCPU | Memory | 열어야 할 포트 (내부) | +|--------------|------|----------|------|--------|---------------------| +| `speedcam-db` | MySQL + Exporters | e2-highmem-2 | 2 | 16GB | 3306, 9104, 8080 | +| `speedcam-mq` | RabbitMQ | e2-medium | 2 | 4GB | 5672, 1883, 15672, 15692, 8080 | +| `speedcam-app` | Django API + Flower | e2-medium | 2 | 4GB | 8000, 5555, 8080 | +| `speedcam-ocr` | OCR Celery Worker | e2-standard-2 | 2 | 8GB | 8080 | +| `speedcam-alert` | Alert Celery Worker | e2-small | 2 | 2GB | 8080 | +| `speedcam-mon` | 모니터링 전체 스택 | e2-standard-2 | 2 | 8GB | 3000, 9090, 16686, 3100, 4317, 4318, 8889, 9808, 8080 | + +--- + +## 3. 
GCP 인프라 설정 + +### 3.1 VPC 네트워크 생성 + +```bash +# VPC 네트워크 생성 (커스텀 모드) +gcloud compute networks create speedcam-vpc \ + --subnet-mode=custom \ + --bgp-routing-mode=regional + +# 서브넷 생성 (asia-northeast3) +gcloud compute networks subnets create speedcam-subnet \ + --network=speedcam-vpc \ + --region=${GCP_REGION} \ + --range=10.178.0.0/20 +``` + +### 3.2 방화벽 규칙 설정 + +#### 3.2.1 내부 통신 허용 + +```bash +# 내부 통신 허용 (VPC 내부에서만) +gcloud compute firewall-rules create speedcam-internal \ + --network=speedcam-vpc \ + --allow=tcp:3306,tcp:5672,tcp:1883,tcp:15672,tcp:15692,tcp:8000,tcp:5555,tcp:4317,tcp:4318,tcp:8889,tcp:9090,tcp:3000,tcp:16686,tcp:3100,tcp:9104,tcp:9808,tcp:8080,tcp:9080 \ + --source-ranges=10.178.0.0/20 \ + --target-tags=speedcam \ + --description="SpeedCam internal communication" +``` + +#### 3.2.2 외부 접근 허용 (필요한 서비스만) + +```bash +# Django API 외부 접근 (프론트엔드) +gcloud compute firewall-rules create speedcam-api-external \ + --network=speedcam-vpc \ + --allow=tcp:8000 \ + --source-ranges=0.0.0.0/0 \ + --target-tags=speedcam-app \ + --description="Django API external access" + +# MQTT 외부 접근 (Edge Device) +gcloud compute firewall-rules create speedcam-mqtt-external \ + --network=speedcam-vpc \ + --allow=tcp:1883 \ + --source-ranges=0.0.0.0/0 \ + --target-tags=speedcam-mq \ + --description="MQTT external access for edge devices" + +# Grafana UI 외부 접근 (운영자만) +gcloud compute firewall-rules create speedcam-grafana-external \ + --network=speedcam-vpc \ + --allow=tcp:3000 \ + --source-ranges=/32 \ + --target-tags=speedcam-mon \ + --description="Grafana external access (admin only)" +``` + +### 3.3 Artifact Registry 설정 + +```bash +# 저장소 생성 +gcloud artifacts repositories create speedcam \ + --repository-format=docker \ + --location=${GCP_REGION} \ + --description="Speedcam MSA Docker images" + +# Docker 인증 설정 +gcloud auth configure-docker ${GCP_REGION}-docker.pkg.dev +``` + +### 3.4 GCE 인스턴스 생성 + +```bash +# 1. 
speedcam-db 인스턴스 +gcloud compute instances create speedcam-db \ + --zone=${GCP_ZONE} \ + --machine-type=e2-highmem-2 \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam \ + --metadata=startup-script='#!/bin/bash +apt-get update +apt-get install -y docker.io docker-compose +systemctl start docker +systemctl enable docker' + +# 2. speedcam-mq 인스턴스 +gcloud compute instances create speedcam-mq \ + --zone=${GCP_ZONE} \ + --machine-type=e2-medium \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam,speedcam-mq \ + --metadata=startup-script='#!/bin/bash +apt-get update +apt-get install -y docker.io docker-compose +systemctl start docker +systemctl enable docker' + +# 3. speedcam-app 인스턴스 +gcloud compute instances create speedcam-app \ + --zone=${GCP_ZONE} \ + --machine-type=e2-medium \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam,speedcam-app \ + --scopes=cloud-platform \ + --metadata=startup-script='#!/bin/bash +apt-get update +apt-get install -y docker.io docker-compose +systemctl start docker +systemctl enable docker' + +# 4. speedcam-ocr 인스턴스 +gcloud compute instances create speedcam-ocr \ + --zone=${GCP_ZONE} \ + --machine-type=e2-standard-2 \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam \ + --scopes=cloud-platform \ + --metadata=startup-script='#!/bin/bash +apt-get update +apt-get install -y docker.io docker-compose +systemctl start docker +systemctl enable docker' + +# 5. speedcam-alert 인스턴스 +gcloud compute instances create speedcam-alert \ + --zone=${GCP_ZONE} \ + --machine-type=e2-small \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam \ + --scopes=cloud-platform \ + --metadata=startup-script='#!/bin/bash +apt-get update +apt-get install -y docker.io docker-compose +systemctl start docker +systemctl enable docker' + +# 6. 
speedcam-mon 인스턴스 +gcloud compute instances create speedcam-mon \ + --zone=${GCP_ZONE} \ + --machine-type=e2-standard-2 \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam,speedcam-mon \ + --metadata=startup-script='#!/bin/bash +apt-get update +apt-get install -y docker.io docker-compose +systemctl start docker +systemctl enable docker' +``` + +--- + +## 4. 배포 디렉토리 구조 + +각 인스턴스에 배포할 파일 구조: + +``` +deploy/ +├── env/ +│ ├── backend.env # Django/Celery 공통 환경변수 +│ ├── mysql.env # MySQL 전용 (DB 인스턴스만) +│ └── rabbitmq.env # RabbitMQ 전용 (MQ 인스턴스만) +├── compose/ +│ ├── docker-compose.db.yml +│ ├── docker-compose.mq.yml +│ ├── docker-compose.app.yml +│ ├── docker-compose.ocr.yml +│ ├── docker-compose.alert.yml +│ └── docker-compose.mon.yml +├── config/ +│ ├── mysql/ +│ │ └── init.sql +│ ├── monitoring/ +│ │ ├── otel-collector/ +│ │ │ └── otel-collector-config.yml +│ │ ├── prometheus/ +│ │ │ └── prometheus.yml +│ │ ├── loki/ +│ │ │ └── loki-config.yml +│ │ ├── promtail/ +│ │ │ ├── promtail-config.yml # 각 인스턴스용 +│ │ │ └── promtail-config.mon.yml # 모니터링 인스턴스용 +│ │ ├── grafana/ +│ │ │ └── provisioning/ +│ │ │ ├── datasources/ +│ │ │ │ └── datasources.yml +│ │ │ └── dashboards/ +│ │ │ └── dashboards.yml +│ │ └── mysqld-exporter/ +│ │ └── .my.cnf +│ └── credentials/ +│ └── (GCP, Firebase 인증 파일) +└── images/ + └── (빌드된 이미지는 Artifact Registry 사용) +``` + +--- + +## 5. 
Docker Compose 파일 + +### 5.1 로컬 → 멀티 인스턴스 주요 변경점 + +| 항목 | 로컬 (현재) | 멀티 인스턴스 | +|------|------------|--------------| +| 네트워크 | `networks: speedcam-network` (bridge) | `network_mode: host` | +| 서비스 디스커버리 | 컨테이너명 (`mysql`, `rabbitmq`) | GCP 내부 IP | +| 포트 매핑 | `ports: "3306:3306"` | 불필요 (host 모드에서 직접 바인딩) | +| depends_on | 서비스 간 의존성 | 제거 (다른 인스턴스에 있으므로) | +| 이미지 | `build: context` | Artifact Registry 이미지 | +| cAdvisor | 모니터링에 1개 | 모든 인스턴스에 1개씩 | +| Promtail | 모니터링에 1개 | 모든 인스턴스에 1개씩 | + +### 5.2 docker-compose.db.yml + +speedcam-db 인스턴스에 배포 + +```yaml +services: + mysql: + image: mysql:8.0 + container_name: speedcam-mysql + restart: always + network_mode: host + env_file: + - ../env/mysql.env + volumes: + - mysql_data:/var/lib/mysql + - ../config/mysql/init.sql:/docker-entrypoint-initdb.d/init.sql:ro + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + interval: 10s + timeout: 5s + retries: 5 + + mysqld-exporter: + image: prom/mysqld-exporter:v0.15.1 + container_name: speedcam-mysqld-exporter + restart: always + network_mode: host + volumes: + - ../config/monitoring/mysqld-exporter/.my.cnf:/cfg/.my.cnf:ro + command: + - "--config.my-cnf=/cfg/.my.cnf" + depends_on: + mysql: + condition: service_healthy + + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + restart: always + network_mode: host + volumes: + - ../config/monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + + cadvisor: + image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + restart: always + network_mode: host + volumes: + - /:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro + +volumes: + mysql_data: +``` + +### 5.3 docker-compose.mq.yml + +speedcam-mq 인스턴스에 배포 + +```yaml +services: + rabbitmq: + image: 
rabbitmq:3.13-management + container_name: speedcam-rabbitmq + restart: always + network_mode: host + env_file: + - ../env/rabbitmq.env + volumes: + - rabbitmq_data:/var/lib/rabbitmq + command: > + bash -c "rabbitmq-plugins enable --offline rabbitmq_mqtt rabbitmq_prometheus && + rabbitmq-server" + healthcheck: + test: ["CMD", "rabbitmq-diagnostics", "check_running"] + interval: 10s + timeout: 5s + retries: 5 + + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + restart: always + network_mode: host + volumes: + - ../config/monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + + cadvisor: + image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + restart: always + network_mode: host + volumes: + - /:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro + +volumes: + rabbitmq_data: +``` + +### 5.4 docker-compose.app.yml + +speedcam-app 인스턴스에 배포 (이미지는 Artifact Registry에서 pull) + +```yaml +services: + main: + image: ${ARTIFACT_REGISTRY}/speedcam-main:latest + container_name: speedcam-main + restart: always + network_mode: host + env_file: + - ../env/backend.env + volumes: + - ../config/credentials:/app/credentials:ro + + flower: + image: ${ARTIFACT_REGISTRY}/speedcam-main:latest + container_name: speedcam-flower + restart: always + network_mode: host + env_file: + - ../env/backend.env + command: celery -A config flower --port=5555 + + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + restart: always + network_mode: host + volumes: + - ../config/monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + + cadvisor: 
+ image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + restart: always + network_mode: host + volumes: + - /:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro +``` + +### 5.5 docker-compose.ocr.yml + +speedcam-ocr 인스턴스에 배포 + +```yaml +services: + ocr-worker: + image: ${ARTIFACT_REGISTRY}/speedcam-ocr:latest + container_name: speedcam-ocr + restart: always + network_mode: host + env_file: + - ../env/backend.env + volumes: + - ../config/credentials:/app/credentials:ro + + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + restart: always + network_mode: host + volumes: + - ../config/monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + + cadvisor: + image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + restart: always + network_mode: host + volumes: + - /:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro +``` + +### 5.6 docker-compose.alert.yml + +speedcam-alert 인스턴스에 배포 + +```yaml +services: + alert-worker: + image: ${ARTIFACT_REGISTRY}/speedcam-alert:latest + container_name: speedcam-alert + restart: always + network_mode: host + env_file: + - ../env/backend.env + volumes: + - ../config/credentials:/app/credentials:ro + + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + restart: always + network_mode: host + volumes: + - ../config/monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + + cadvisor: + image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + restart: always + network_mode: host + volumes: + - 
/:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro +``` + +### 5.7 docker-compose.mon.yml + +speedcam-mon 인스턴스에 배포 + +```yaml +services: + otel-collector: + image: otel/opentelemetry-collector-contrib:0.98.0 + container_name: speedcam-otel-collector + restart: always + network_mode: host + volumes: + - ../config/monitoring/otel-collector/otel-collector-config.yml:/etc/otel-collector-config.yml:ro + command: ["--config", "/etc/otel-collector-config.yml"] + + jaeger: + image: jaegertracing/all-in-one:1.57 + container_name: speedcam-jaeger + restart: always + network_mode: host + environment: + - COLLECTOR_OTLP_ENABLED=true + + prometheus: + image: prom/prometheus:v2.51.2 + container_name: speedcam-prometheus + restart: always + network_mode: host + volumes: + - ../config/monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro + - prometheus_data:/prometheus + command: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.retention.time=15d" + - "--web.enable-remote-write-receiver" + + grafana: + image: grafana/grafana:10.4.2 + container_name: speedcam-grafana + restart: always + network_mode: host + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_PASSWORD:-admin} + - GF_USERS_ALLOW_SIGN_UP=false + volumes: + - ../config/monitoring/grafana/provisioning:/etc/grafana/provisioning:ro + - grafana_data:/var/lib/grafana + + loki: + image: grafana/loki:2.9.6 + container_name: speedcam-loki + restart: always + network_mode: host + volumes: + - ../config/monitoring/loki/loki-config.yml:/etc/loki/local-config.yaml:ro + - loki_data:/loki + command: -config.file=/etc/loki/local-config.yaml + + promtail: + image: grafana/promtail:2.9.6 + container_name: speedcam-promtail + restart: always + network_mode: host + volumes: + - ../config/monitoring/promtail/promtail-config.mon.yml:/etc/promtail/config.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - 
/var/lib/docker/containers:/var/lib/docker/containers:ro + command: -config.file=/etc/promtail/config.yml + + celery-exporter: + image: danihodovic/celery-exporter:0.10.3 + container_name: speedcam-celery-exporter + restart: always + network_mode: host + environment: + CE_BROKER_URL: "amqp://sa:@${MQ_HOST}:5672//" + + cadvisor: + image: gcr.io/cadvisor/cadvisor:v0.49.1 + container_name: speedcam-cadvisor + restart: always + network_mode: host + volumes: + - /:/rootfs:ro + - /var/run:/var/run:ro + - /sys:/sys:ro + - /var/lib/docker/:/var/lib/docker:ro + +volumes: + prometheus_data: + grafana_data: + loki_data: +``` + +--- + +## 6. 설정 파일 + +### 6.1 환경 변수 파일 + +#### 6.1.1 backend.env + +모든 앱/워커 인스턴스에 배포. 컨테이너명을 IP로 교체. + +```bash +# Django +SECRET_KEY= +DJANGO_SETTINGS_MODULE=config.settings.prod +DEBUG=False + +# Database — ${DB_HOST}를 실제 IP로 교체 +DB_HOST=${DB_HOST} +DB_PORT=3306 +DB_USER=sa +DB_PASSWORD= +DB_NAME=speedcam +DB_NAME_VEHICLES=speedcam_vehicles +DB_NAME_DETECTIONS=speedcam_detections +DB_NAME_NOTIFICATIONS=speedcam_notifications + +# RabbitMQ — ${MQ_HOST}를 실제 IP로 교체 +CELERY_BROKER_URL=amqp://sa:@${MQ_HOST}:5672// +RABBITMQ_HOST=${MQ_HOST} +MQTT_PORT=1883 +MQTT_USER=sa +MQTT_PASS= + +# GCS / Firebase +GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/gcp-cloud-storage.json +FIREBASE_CREDENTIALS=/app/credentials/firebase-service-account.json + +# Workers +OCR_CONCURRENCY=4 +ALERT_CONCURRENCY=100 +OCR_MOCK=false +FCM_MOCK=false + +# Gunicorn +GUNICORN_WORKERS=4 +GUNICORN_THREADS=2 + +# Logging +LOG_LEVEL=info + +# CORS — 프론트엔드 도메인으로 교체 +CORS_ALLOWED_ORIGINS=https://your-frontend-domain.com + +# OpenTelemetry — ${MON_HOST}를 실제 IP로 교체 +OTEL_EXPORTER_OTLP_ENDPOINT=http://${MON_HOST}:4317 +OTEL_EXPORTER_OTLP_PROTOCOL=grpc +OTEL_RESOURCE_ATTRIBUTES=service.namespace=speedcam,deployment.environment=prod +OTEL_TRACES_SAMPLER=parentbased_always_on +OTEL_PYTHON_LOG_CORRELATION=true +``` + +#### 6.1.2 mysql.env + +DB 인스턴스에 배포. 
+ +```bash +MYSQL_ROOT_PASSWORD= +MYSQL_USER=sa +MYSQL_PASSWORD= +MYSQL_DATABASE=speedcam +``` + +#### 6.1.3 rabbitmq.env + +MQ 인스턴스에 배포. + +```bash +RABBITMQ_DEFAULT_USER=sa +RABBITMQ_DEFAULT_PASS= +``` + +### 6.2 모니터링 설정 + +#### 6.2.1 prometheus.yml + +모니터링 인스턴스에 배포. 모든 타겟을 실제 IP로 지정. + +```yaml +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + # --- Application --- + - job_name: "django" + metrics_path: /metrics + static_configs: + - targets: ["${APP_HOST}:8000"] + + # --- OpenTelemetry Collector --- + - job_name: "otel-collector" + static_configs: + - targets: ["localhost:8889"] + + # --- Infrastructure --- + - job_name: "rabbitmq" + static_configs: + - targets: ["${MQ_HOST}:15692"] + + - job_name: "mysql" + static_configs: + - targets: ["${DB_HOST}:9104"] + + # --- Workers --- + - job_name: "celery" + static_configs: + - targets: ["localhost:9808"] + + # --- Container Resources (모든 인스턴스) --- + - job_name: "cadvisor" + static_configs: + - targets: + - "${DB_HOST}:8080" + - "${MQ_HOST}:8080" + - "${APP_HOST}:8080" + - "${OCR_HOST}:8080" + - "${ALERT_HOST}:8080" + - "localhost:8080" + relabel_configs: + - source_labels: [__address__] + target_label: instance +``` + +#### 6.2.2 otel-collector-config.yml + +모니터링 인스턴스에 배포. 
 + +```yaml +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + +processors: + batch: + timeout: 5s + send_batch_size: 1024 + resource: + attributes: + - key: service.namespace + value: speedcam + action: upsert + +exporters: + otlp/jaeger: + endpoint: localhost:4317 + tls: + insecure: true + prometheus: + endpoint: 0.0.0.0:8889 + namespace: speedcam + resource_to_telemetry_conversion: + enabled: true + +service: + pipelines: + traces: + receivers: [otlp] + processors: [batch, resource] + exporters: [otlp/jaeger] + metrics: + receivers: [otlp] + processors: [batch, resource] + exporters: [prometheus] +``` + +#### 6.2.3 promtail-config.yml (앱/워커/DB/MQ 인스턴스 공통) + +각 인스턴스에 배포. Loki 주소를 모니터링 인스턴스 IP로 지정. + +```yaml +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: http://${MON_HOST}:3100/loki/api/v1/push + +scrape_configs: + - job_name: docker + docker_sd_configs: + - host: unix:///var/run/docker.sock + refresh_interval: 5s + filters: + - name: name + values: + - "speedcam-.*" + relabel_configs: + - source_labels: ["__meta_docker_container_name"] + regex: "/(.*)" + target_label: "container" + - source_labels: ["__meta_docker_container_name"] + regex: "/speedcam-(.*)" + target_label: "service" + pipeline_stages: + - regex: + expression: ".*trace_id=(?P<trace_id>[a-f0-9]+).*" + - labels: + trace_id: +``` + +#### 6.2.4 promtail-config.mon.yml (모니터링 인스턴스 전용) + +Loki가 같은 인스턴스이므로 localhost.
 + +```yaml +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: http://localhost:3100/loki/api/v1/push + +scrape_configs: + - job_name: docker + docker_sd_configs: + - host: unix:///var/run/docker.sock + refresh_interval: 5s + filters: + - name: name + values: + - "speedcam-.*" + relabel_configs: + - source_labels: ["__meta_docker_container_name"] + regex: "/(.*)" + target_label: "container" + - source_labels: ["__meta_docker_container_name"] + regex: "/speedcam-(.*)" + target_label: "service" + pipeline_stages: + - regex: + expression: ".*trace_id=(?P<trace_id>[a-f0-9]+).*" + - labels: + trace_id: +``` + +#### 6.2.5 grafana/provisioning/datasources/datasources.yml + +모니터링 인스턴스에 배포. + +```yaml +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://localhost:9090 + isDefault: true + editable: false + + - name: Jaeger + type: jaeger + uid: jaeger + access: proxy + url: http://localhost:16686 + editable: false + + - name: Loki + type: loki + access: proxy + url: http://localhost:3100 + editable: false + jsonData: + derivedFields: + - datasourceUid: jaeger + matcherRegex: "trace_id=(\\w+)" + name: TraceID + url: "$${__value.raw}" + datasourceName: Jaeger +``` + +#### 6.2.6 mysqld-exporter/.my.cnf + +DB 인스턴스에 배포.
+ +```bash +# deploy/env/hosts.env — IP 정의 (이것만 수정) +export DB_HOST=10.178.0.11 +export MQ_HOST=10.178.0.12 +export APP_HOST=10.178.0.13 +export OCR_HOST=10.178.0.14 +export ALERT_HOST=10.178.0.15 +export MON_HOST=10.178.0.20 +``` + +```bash +# 배포 스크립트 예시 +source env/hosts.env + +# 템플릿에서 실제 설정 파일 생성 +envsubst < config/monitoring/prometheus/prometheus.yml.template \ + > config/monitoring/prometheus/prometheus.yml + +envsubst < env/backend.env.template \ + > env/backend.env + +envsubst < config/monitoring/promtail/promtail-config.yml.template \ + > config/monitoring/promtail/promtail-config.yml + +envsubst < compose/docker-compose.mon.yml.template \ + > compose/docker-compose.mon.yml +``` + +이렇게 하면 GCP 계정이 바뀌어도 `hosts.env`만 수정하고 envsubst를 다시 실행하면 된다. + +--- + +## 7. Docker 이미지 빌드 및 배포 + +### 7.1 Docker 이미지 빌드 + +```bash +# linux/amd64 플랫폼으로 빌드 (GCE용) +docker build --platform linux/amd64 \ + -t ${ARTIFACT_REGISTRY}/speedcam-main:latest \ + -f docker/Dockerfile.main . + +docker build --platform linux/amd64 \ + -t ${ARTIFACT_REGISTRY}/speedcam-ocr:latest \ + -f docker/Dockerfile.ocr . + +docker build --platform linux/amd64 \ + -t ${ARTIFACT_REGISTRY}/speedcam-alert:latest \ + -f docker/Dockerfile.alert . +``` + +### 7.2 Docker 이미지 푸시 + +```bash +docker push ${ARTIFACT_REGISTRY}/speedcam-main:latest +docker push ${ARTIFACT_REGISTRY}/speedcam-ocr:latest +docker push ${ARTIFACT_REGISTRY}/speedcam-alert:latest +``` + +--- + +## 8. 배포 순서 + +인프라 → 앱 → 모니터링 순서로 배포한다. 
+ +### Step 1: IP 확인 + +```bash +# GCP 콘솔 또는 CLI에서 각 인스턴스 내부 IP 확인 +gcloud compute instances list --filter="name~speedcam" \ + --format="table(name, networkInterfaces[0].networkIP)" + +# 예시 출력: +# NAME INTERNAL_IP +# speedcam-db 10.178.0.11 +# speedcam-mq 10.178.0.12 +# speedcam-app 10.178.0.13 +# speedcam-ocr 10.178.0.14 +# speedcam-alert 10.178.0.15 +# speedcam-mon 10.178.0.20 +``` + +### Step 2: 설정 파일에 IP 주입 + +```bash +# backend.env, prometheus.yml, promtail-config.yml 등에서 +# ${DB_HOST}, ${MQ_HOST} 등을 실제 IP로 교체 +# (sed 또는 envsubst 사용 가능) + +# envsubst 예시 +source deploy/env/hosts.env +envsubst < deploy/env/backend.env.template > deploy/env/backend.env +envsubst < deploy/config/monitoring/prometheus/prometheus.yml.template > deploy/config/monitoring/prometheus/prometheus.yml +``` + +### Step 3: DB 인스턴스 배포 (먼저) + +```bash +gcloud compute ssh speedcam-db --zone=${GCP_ZONE} + +cd deploy/compose +docker compose -f docker-compose.db.yml up -d + +# MySQL healthy 확인 +docker compose -f docker-compose.db.yml ps +docker logs speedcam-mysql +``` + +### Step 4: MQ 인스턴스 배포 + +```bash +gcloud compute ssh speedcam-mq --zone=${GCP_ZONE} + +cd deploy/compose +docker compose -f docker-compose.mq.yml up -d + +# RabbitMQ healthy 확인 +docker compose -f docker-compose.mq.yml ps +docker logs speedcam-rabbitmq +``` + +### Step 5: App 인스턴스 배포 + +```bash +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} + +cd deploy/compose +docker compose -f docker-compose.app.yml up -d + +# curl localhost:8000/health/ 로 확인 +curl localhost:8000/health/ +``` + +### Step 6: Django 마이그레이션 + +```bash +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} + +docker exec speedcam-main python manage.py makemigrations vehicles detections notifications +docker exec speedcam-main python manage.py migrate --database=default --noinput +docker exec speedcam-main python manage.py migrate vehicles --database=vehicles_db --noinput +docker exec speedcam-main python manage.py migrate detections --database=detections_db 
--noinput +docker exec speedcam-main python manage.py migrate notifications --database=notifications_db --noinput +``` + +### Step 7: Worker 인스턴스 배포 + +```bash +# OCR Worker +gcloud compute ssh speedcam-ocr --zone=${GCP_ZONE} +cd deploy/compose +docker compose -f docker-compose.ocr.yml up -d + +# Alert Worker +gcloud compute ssh speedcam-alert --zone=${GCP_ZONE} +cd deploy/compose +docker compose -f docker-compose.alert.yml up -d +``` + +### Step 8: 모니터링 인스턴스 배포 (마지막) + +```bash +gcloud compute ssh speedcam-mon --zone=${GCP_ZONE} + +cd deploy/compose +docker compose -f docker-compose.mon.yml up -d + +# Prometheus targets 확인 +curl -s localhost:9090/api/v1/targets | python3 -c " +import json, sys +data = json.load(sys.stdin) +for t in data['data']['activeTargets']: + print(f\"{t['labels']['job']:20s} {t['labels']['instance']:30s} {t['health']}\") +" +``` + +--- + +## 9. 배포 검증 + +배포 후 아래 항목을 순서대로 확인한다. + +### 9.1 인프라 + +- [ ] MySQL 접속: `mysql -h ${DB_HOST} -u sa -p` +- [ ] RabbitMQ Management UI: `http://${MQ_HOST}:15672` (sa/) +- [ ] RabbitMQ Prometheus metrics: `curl http://${MQ_HOST}:15692/metrics | head` + +### 9.2 애플리케이션 + +- [ ] Django health: `curl http://${APP_HOST}:8000/health/` +- [ ] Django metrics: `curl http://${APP_HOST}:8000/metrics | head` +- [ ] Swagger UI: `http://${APP_HOST}:8000/swagger/` +- [ ] Flower: `http://${APP_HOST}:5555` + +### 9.3 워커 + +- [ ] OCR Worker 로그: `gcloud compute ssh speedcam-ocr --zone=${GCP_ZONE} --command="docker logs speedcam-ocr"` +- [ ] Alert Worker 로그: `gcloud compute ssh speedcam-alert --zone=${GCP_ZONE} --command="docker logs speedcam-alert"` +- [ ] RabbitMQ에서 consumer 확인: Management UI → Queues → ocr_queue, fcm_queue + +### 9.4 모니터링 + +- [ ] Prometheus targets 전부 UP: `http://${MON_HOST}:9090/targets` +- [ ] Grafana 접속: `http://${MON_HOST}:3000` (admin/admin) +- [ ] Grafana 데이터소스 3개 연결: Prometheus, Jaeger, Loki +- [ ] Jaeger에서 서비스 트레이스 확인: `http://${MON_HOST}:16686` +- [ ] Loki에서 로그 확인: Grafana → Explore → Loki → 
`{service="main"}` +- [ ] cAdvisor: 6개 인스턴스 모두 cadvisor:8080 → Prometheus에서 수집 확인 + +--- + +## 10. 운영 가이드 + +### 10.1 서비스 재시작 + +```bash +# 개별 인스턴스 재시작 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} +cd deploy/compose +docker compose -f docker-compose.app.yml restart + +# 전체 서비스 재시작 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} --command="cd deploy/compose && docker compose -f docker-compose.app.yml restart" +gcloud compute ssh speedcam-ocr --zone=${GCP_ZONE} --command="cd deploy/compose && docker compose -f docker-compose.ocr.yml restart" +gcloud compute ssh speedcam-alert --zone=${GCP_ZONE} --command="cd deploy/compose && docker compose -f docker-compose.alert.yml restart" +``` + +### 10.2 이미지 업데이트 배포 + +```bash +# 1. 새 이미지 빌드 & 푸시 +docker build --platform linux/amd64 -t ${ARTIFACT_REGISTRY}/speedcam-main:latest -f docker/Dockerfile.main . +docker push ${ARTIFACT_REGISTRY}/speedcam-main:latest + +# 2. 인스턴스에서 새 이미지 pull 및 재시작 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} +cd deploy/compose +docker compose -f docker-compose.app.yml pull +docker compose -f docker-compose.app.yml up -d +``` + +### 10.3 스케일링 + +```bash +# OCR Worker 추가 인스턴스 +gcloud compute instances create speedcam-ocr-2 \ + --zone=${GCP_ZONE} \ + --machine-type=e2-standard-2 \ + --network-interface=subnet=speedcam-subnet,no-address \ + --tags=speedcam \ + --scopes=cloud-platform + +# 동일한 설정 파일로 배포 +gcloud compute scp --recurse deploy/ speedcam-ocr-2:~ --zone=${GCP_ZONE} +gcloud compute ssh speedcam-ocr-2 --zone=${GCP_ZONE} +cd deploy/compose +docker compose -f docker-compose.ocr.yml up -d +``` + +### 10.4 로그 모니터링 + +```bash +# 실시간 로그 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker logs -f speedcam-main" + +# 최근 로그 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker logs --tail 100 speedcam-main" +``` + +### 10.5 IP 변경 시 수정 대상 + +GCP 계정/프로젝트 변경으로 IP가 바뀌면 아래 파일만 교체하면 된다. 
+ +| 파일 | 교체 대상 | 배포 위치 | +|------|-----------|-----------| +| `env/backend.env` | `DB_HOST`, `RABBITMQ_HOST`, `CELERY_BROKER_URL`, `OTEL_EXPORTER_OTLP_ENDPOINT` | app, ocr, alert | +| `config/monitoring/prometheus/prometheus.yml` | 모든 targets IP | mon | +| `config/monitoring/promtail/promtail-config.yml` | Loki URL (`${MON_HOST}`) | db, mq, app, ocr, alert | +| `docker-compose.mon.yml` | `CE_BROKER_URL` 의 MQ IP | mon | + +Grafana datasources, OTel Collector config, mysqld-exporter .my.cnf는 같은 인스턴스 내 통신(localhost)이므로 IP 변경 영향 없음. + +--- + +## 11. 트러블슈팅 + +### 11.1 컨테이너 시작 실패 + +```bash +# 컨테이너 상태 확인 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker ps -a" + +# 종료된 컨테이너 로그 확인 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker logs speedcam-main" +``` + +### 11.2 DB 연결 실패 + +```bash +# MySQL 연결 테스트 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker exec speedcam-main python -c \" +import pymysql +conn = pymysql.connect(host='${DB_HOST}', user='sa', password='', database='speedcam') +print('Connected!') +conn.close()\"" + +# 방화벽 규칙 확인 +gcloud compute firewall-rules list --filter="name~speedcam" +``` + +### 11.3 MQTT 연결 실패 + +```bash +# RabbitMQ MQTT 플러그인 상태 확인 +gcloud compute ssh speedcam-mq --zone=${GCP_ZONE} \ + --command="docker exec speedcam-rabbitmq rabbitmq-plugins list | grep mqtt" + +# MQTT 포트 listening 확인 +gcloud compute ssh speedcam-mq --zone=${GCP_ZONE} \ + --command="netstat -tlnp | grep 1883" +``` + +### 11.4 이미지 Pull 실패 + +```bash +# 서비스 계정 권한 확인 +gcloud compute instances describe speedcam-app --zone=${GCP_ZONE} \ + --format='get(serviceAccounts[0].scopes)' + +# cloud-platform 스코프 필요 +# 없으면 인스턴스 재생성 또는 scope 추가 +``` + +### 11.5 host 모드 네트워크 문제 + +```bash +# 포트 사용 확인 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="netstat -tlnp | grep 8000" + +# 컨테이너 네트워크 모드 확인 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker inspect speedcam-main | grep 
NetworkMode" +``` + +### 11.6 모니터링 메트릭 수집 실패 + +```bash +# Prometheus targets 상태 확인 +curl http://${MON_HOST}:9090/api/v1/targets | python3 -m json.tool + +# cAdvisor 접근 확인 +curl http://${APP_HOST}:8080/metrics | head + +# Promtail 로그 확인 +gcloud compute ssh speedcam-app --zone=${GCP_ZONE} \ + --command="docker logs speedcam-promtail" +``` + +--- + +## 12. 리소스 정리 + +```bash +# 모든 인스턴스 삭제 +gcloud compute instances delete \ + speedcam-db speedcam-mq speedcam-app \ + speedcam-ocr speedcam-alert speedcam-mon \ + --zone=${GCP_ZONE} --quiet + +# 방화벽 규칙 삭제 +gcloud compute firewall-rules delete \ + speedcam-internal \ + speedcam-api-external \ + speedcam-mqtt-external \ + speedcam-grafana-external \ + --quiet + +# Artifact Registry 삭제 +gcloud artifacts repositories delete speedcam \ + --location=${GCP_REGION} --quiet + +# VPC 서브넷 삭제 +gcloud compute networks subnets delete speedcam-subnet \ + --region=${GCP_REGION} --quiet + +# VPC 네트워크 삭제 +gcloud compute networks delete speedcam-vpc --quiet +``` + +--- + +## 변경 이력 + +| 날짜 | 버전 | 변경 내용 | +|------|------|----------| +| 2026-01-23 | 1.0 | 초기 문서 작성 | +| 2026-02-06 | 2.0 | 멀티 인스턴스 배포 방식으로 전면 재작성 (6개 인스턴스, docker-compose + host mode, 모니터링 스택 추가) | diff --git a/docs/MONITORING.md b/docs/MONITORING.md new file mode 100644 index 00000000..65251c4f --- /dev/null +++ b/docs/MONITORING.md @@ -0,0 +1,452 @@ +# 모니터링 스택 가이드 + +## 1. 아키텍처 개요 + +``` +App Services (main, ocr-worker, alert-worker) + │ OTLP gRPC (:4317) + ▼ +OTel Collector ──traces──► Jaeger (:16686) ──► Grafana (:3000) + │ ▲ + └──metrics──► Prometheus (:9090) ──────────────┘ + ▲ │ +cAdvisor ─────────────┤ Loki (:3100) ◄── Promtail +Django /metrics ──────┤ ▲ +RabbitMQ :15692 ──────┤ Docker logs +mysqld-exporter ──────┤ +celery-exporter ──────┘ + +K6 (부하테스트) ──prometheus remote write──► Prometheus +``` + +--- + +## 2. 
서비스 구성 + +### 2.1 전체 서비스 목록 + +| 서비스 | 이미지 | 포트 | 역할 | +|--------|--------|------|------| +| **otel-collector** | `otel/opentelemetry-collector-contrib:0.98.0` | 4317 (gRPC), 4318 (HTTP), 8889 | 트레이스/메트릭 수집 허브 | +| **jaeger** | `jaegertracing/all-in-one:1.57` | 16686 (UI), 14250 | 분산 트레이싱 저장/UI | +| **prometheus** | `prom/prometheus:v2.51.2` | 9090 | 메트릭 수집/저장/쿼리 | +| **grafana** | `grafana/grafana:10.4.2` | 3000 | 통합 대시보드 | +| **loki** | `grafana/loki:2.9.6` | 3100 | 로그 집계/저장 | +| **promtail** | `grafana/promtail:2.9.6` | - | Docker 로그 → Loki 전송 | +| **cadvisor** | `gcr.io/cadvisor/cadvisor:v0.49.1` | 8080 | 컨테이너 리소스 메트릭 | +| **mysqld-exporter** | `prom/mysqld-exporter:v0.15.1` | 9104 | MySQL 메트릭 노출 | +| **celery-exporter** | `danihodovic/celery-exporter:0.10.3` | 9808 | Celery 큐/태스크 메트릭 | +| **k6** | `grafana/k6:latest` | - | 부하 테스트 (on-demand) | + +### 2.2 Prometheus Scrape Targets + +| Job | Target | 수집 항목 | +|-----|--------|-----------| +| `django` | `main:8000/metrics` | HTTP 요청 수, 응답 시간, DB 쿼리 수 | +| `otel-collector` | `otel-collector:8889` | OTel에서 변환된 앱 메트릭 | +| `rabbitmq` | `rabbitmq:15692` | 큐 깊이, 메시지 rate, 커넥션, 채널 | +| `mysql` | `mysqld-exporter:9104` | 쿼리 수, 커넥션, InnoDB 버퍼, 슬로우 쿼리 | +| `celery` | `celery-exporter:9808` | 태스크 성공/실패, 실행 시간, 큐 길이 | +| `cadvisor` | `cadvisor:8080` | 컨테이너 CPU, 메모리, 네트워크 I/O | + +--- + +## 3. 실행 방법 + +### 3.1 기본 서비스만 (모니터링 없이) + +```bash +cd docker +docker compose up -d +``` + +앱은 모니터링 스택 없이도 정상 동작함. OTel Collector에 연결 실패해도 앱은 죽지 않음 (graceful fallback). 
+ +### 3.2 모니터링 포함 + +```bash +cd docker +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d +``` + +### 3.3 부하 테스트 포함 (k6) + +```bash +# k6 서비스는 profiles: [loadtest] 이므로 명시적 실행 필요 +cd docker +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml \ + run k6 run --out experimental-prometheus-rw /scripts/load-test.js +``` + +### 3.4 모니터링만 재시작 (앱 유지) + +```bash +cd docker +docker compose -f docker-compose.monitoring.yml restart prometheus grafana +``` + +--- + +## 4. 접속 정보 + +| 서비스 | URL | 인증 | +|--------|-----|------| +| **Grafana** | http://localhost:3000 | admin / admin | +| **Prometheus** | http://localhost:9090 | 없음 | +| **Jaeger** | http://localhost:16686 | 없음 | +| **RabbitMQ Management** | http://localhost:15672 | sa / 1234 | +| **Flower** | http://localhost:5555 | 없음 | +| **cAdvisor** | http://localhost:8080 | 없음 | +| **Django /metrics** | http://localhost:8000/metrics | 없음 | + +--- + +## 5. Prometheus 타겟 확인 + +### 5.1 UI에서 확인 + +``` +http://localhost:9090/targets +``` + +6개 job이 모두 **UP** (초록색)이면 정상. + +### 5.2 API로 확인 + +```bash +curl -s http://localhost:9090/api/v1/targets | python3 -c " +import json, sys +data = json.load(sys.stdin) +for t in data['data']['activeTargets']: + print(f\"{t['labels']['job']:20s} {t['labels']['instance']:30s} {t['health']}\") +" +``` + +### 5.3 타겟이 DOWN일 때 + +| 증상 | 원인 | 해결 | +|------|------|------| +| django DOWN | main 컨테이너 미기동 또는 django-prometheus 미설치 | `docker logs speedcam-main` 확인 | +| rabbitmq DOWN | rabbitmq_prometheus 플러그인 미활성화 | docker-compose.yml의 command에 `rabbitmq_prometheus` 포함 확인 | +| mysql DOWN | mysqld-exporter 인증 실패 | `.my.cnf` 파일의 user/password 확인 | +| celery DOWN | celery-exporter가 broker 연결 실패 | RabbitMQ 기동 여부 확인 | + +--- + +## 6. 
설정 파일 구조 + +``` +docker/monitoring/ +├── otel-collector/ +│ └── otel-collector-config.yml # OTLP 수신 → Jaeger/Prometheus 내보내기 +├── prometheus/ +│ └── prometheus.yml # scrape targets 정의 +├── loki/ +│ └── loki-config.yml # 로그 저장 (7일 보존) +├── promtail/ +│ └── promtail-config.yml # Docker 로그 수집 → Loki 전송 +├── grafana/ +│ └── provisioning/ +│ ├── datasources/ +│ │ └── datasources.yml # Prometheus, Jaeger, Loki 자동 등록 +│ └── dashboards/ +│ └── dashboards.yml # 대시보드 프로비저닝 +└── mysqld-exporter/ + └── .my.cnf # MySQL 접속 정보 +``` + +--- + +## 7. OpenTelemetry 계측 + +### 7.1 앱 계측 방식 + +`opentelemetry-instrument` CLI로 자동 계측 (코드 수정 없음): + +```bash +# start_main.sh +opentelemetry-instrument \ + --service_name speedcam-api \ + gunicorn config.wsgi:application ... + +# start_ocr_worker.sh +opentelemetry-instrument \ + --service_name speedcam-ocr \ + celery -A config worker ... + +# start_alert_worker.sh +opentelemetry-instrument \ + --service_name speedcam-alert \ + celery -A config worker ... +``` + +### 7.2 자동 계측 대상 + +| 패키지 | 계측 대상 | +|--------|-----------| +| `opentelemetry-instrumentation-django` | HTTP 요청/응답, 미들웨어 | +| `opentelemetry-instrumentation-celery` | 태스크 실행, 큐 대기 시간 | +| `opentelemetry-instrumentation-pymysql` | DB 쿼리, 커넥션 | +| `opentelemetry-instrumentation-requests` | 외부 HTTP 호출 (GCS, FCM) | +| `opentelemetry-instrumentation-logging` | 로그에 trace_id/span_id 주입 | + +### 7.3 환경변수 (backend.env) + +```bash +OTEL_EXPORTER_OTLP_ENDPOINT=http://otel-collector:4317 +OTEL_EXPORTER_OTLP_PROTOCOL=grpc +OTEL_RESOURCE_ATTRIBUTES=service.namespace=speedcam,deployment.environment=dev +OTEL_TRACES_SAMPLER=parentbased_always_on +OTEL_PYTHON_LOG_CORRELATION=true +``` + +### 7.4 데이터 흐름 + +``` +Django/Celery → (OTLP gRPC) → OTel Collector + ├── traces → Jaeger + └── metrics → Prometheus (:8889) + +Django /metrics → (HTTP scrape) → Prometheus (django-prometheus 메트릭) +``` + +--- + +## 8. 로그 → 트레이스 연동 (Loki ↔ Jaeger) + +### 8.1 동작 원리 + +1. 
`opentelemetry-instrumentation-logging`이 로그에 `trace_id`, `span_id` 주입 +2. Django LOGGING 포맷: + ``` + INFO 2024-01-01 12:00:00 views [trace_id=abc123 span_id=def456] Request processed + ``` +3. Promtail이 로그에서 `trace_id` 추출 → Loki 라벨로 저장 +4. Grafana Loki 데이터소스의 `derivedFields`가 trace_id → Jaeger 링크 자동 생성 + +### 8.2 확인 방법 + +1. Grafana → Explore → Loki 데이터소스 선택 +2. `{service="main"}` 쿼리 실행 +3. 로그 라인의 `trace_id=` 부분 클릭 → Jaeger 트레이스로 이동 + +--- + +## 9. 유용한 PromQL 쿼리 + +### 9.1 Django + +```promql +# 초당 요청 수 (RPS) +rate(django_http_requests_total_by_method_total[5m]) + +# 응답 시간 p95 +histogram_quantile(0.95, rate(django_http_requests_latency_seconds_by_view_method_bucket[5m])) + +# HTTP 5xx 에러율 +rate(django_http_responses_total_by_status_total{status=~"5.."}[5m]) +/ rate(django_http_responses_total_by_status_total[5m]) + +# DB 쿼리 수 +rate(django_db_execute_total[5m]) +``` + +### 9.2 RabbitMQ + +```promql +# 큐별 대기 메시지 수 +rabbitmq_queue_messages{queue=~"ocr_queue|fcm_queue"} + +# 초당 메시지 발행율 +rate(rabbitmq_queue_messages_published_total[5m]) + +# Consumer 수 +rabbitmq_queue_consumers{queue=~"ocr_queue|fcm_queue"} +``` + +### 9.3 MySQL + +```promql +# 활성 커넥션 수 +mysql_global_status_threads_connected + +# 초당 쿼리 수 +rate(mysql_global_status_questions[5m]) + +# 슬로우 쿼리 수 +rate(mysql_global_status_slow_queries[5m]) +``` + +### 9.4 Celery + +```promql +# 태스크 성공/실패 수 +celery_tasks_total{state="SUCCESS"} +celery_tasks_total{state="FAILURE"} + +# 태스크 실행 시간 +celery_tasks_runtime_seconds{quantile="0.95"} + +# 큐 길이 +celery_queue_length +``` + +### 9.5 컨테이너 리소스 + +```promql +# 컨테이너별 CPU 사용률 +rate(container_cpu_usage_seconds_total{name=~"speedcam-.*"}[5m]) * 100 + +# 컨테이너별 메모리 사용량 (MB) +container_memory_usage_bytes{name=~"speedcam-.*"} / 1024 / 1024 + +# 컨테이너별 네트워크 I/O (bytes/sec) +rate(container_network_receive_bytes_total{name=~"speedcam-.*"}[5m]) +``` + +--- + +## 10. 
K6 부하 테스트 + 모니터링 + +### 10.1 실행 + +```bash +cd docker + +# 부하 테스트 실행 (Prometheus에 결과 기록) +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml \ + run k6 run --out experimental-prometheus-rw /scripts/load-test.js +``` + +### 10.2 K6 → Prometheus 메트릭 + +k6는 `--out experimental-prometheus-rw`로 결과를 Prometheus에 직접 기록. `--web.enable-remote-write-receiver` 플래그가 Prometheus에 설정되어 있음. + +| k6 메트릭 | PromQL | 의미 | +|-----------|--------|------| +| `k6_http_req_duration_seconds` | `histogram_quantile(0.95, rate(k6_http_req_duration_seconds_bucket[1m]))` | HTTP p95 응답 시간 | +| `k6_http_reqs_total` | `rate(k6_http_reqs_total[1m])` | 초당 HTTP 요청 수 | +| `k6_vus` | `k6_vus` | 현재 VU 수 | +| `k6_http_req_failed_total` | `rate(k6_http_req_failed_total[1m])` | 실패율 | + +### 10.3 부하 테스트 중 모니터링 체크리스트 + +부하 테스트 중 Grafana에서 아래 항목을 실시간 확인: + +| 확인 항목 | 보는 곳 | 정상 기준 | +|-----------|---------|-----------| +| API 응답 시간 | Prometheus - django 메트릭 | p95 < 500ms | +| 에러율 | Prometheus - django 5xx rate | < 1% | +| RabbitMQ 큐 깊이 | Prometheus - rabbitmq 메트릭 | 지속 증가 없음 | +| Celery 태스크 처리율 | Prometheus - celery 메트릭 | 발행율 ≈ 소비율 | +| MySQL 커넥션 | Prometheus - mysql 메트릭 | < pool size 80% | +| 컨테이너 CPU/메모리 | Prometheus - cadvisor 메트릭 | CPU < 80%, Memory < 85% | +| 분산 트레이스 | Jaeger | 에러 트레이스 없음 | +| 로그 에러 | Loki | ERROR 로그 급증 없음 | + +--- + +## 11. GCP 멀티 인스턴스 배포 시 고려사항 + +현재 Docker Compose는 단일 호스트 내 가상 네트워크. 
인스턴스를 분리할 경우: + +### 11.1 인스턴스 분리 구성 예시 + +| 인스턴스 | 서비스 | GCP 머신 타입 | +|----------|--------|---------------| +| app | main, flower | e2-medium | +| ocr-worker | ocr-worker | e2-standard-2 (CPU) | +| alert-worker | alert-worker | e2-small | +| db | mysql | e2-highmem-2 | +| mq | rabbitmq | e2-medium | +| monitoring | prometheus, grafana, jaeger, loki, promtail, otel-collector, cadvisor, exporters | e2-standard-2 | + +### 11.2 네트워크 연결 방법 + +**방법 A: GCP 내부 IP 직접 지정** + +```bash +# 각 인스턴스의 backend.env에서 컨테이너명 대신 내부 IP 사용 +DB_HOST=10.178.0.11 # db 인스턴스 +CELERY_BROKER_URL=amqp://sa:1234@10.178.0.12:5672// # mq 인스턴스 +OTEL_EXPORTER_OTLP_ENDPOINT=http://10.178.0.15:4317 # monitoring 인스턴스 +``` + +**방법 B: GCP 내부 DNS (같은 VPC)** + +```bash +DB_HOST=db-instance.asia-northeast3-a.c.PROJECT_ID.internal +``` + +**방법 C: GKE (Kubernetes) — 서비스 분리가 목적이면 추천** + +- Service DNS 자동 부여: `mysql.default.svc.cluster.local` +- IP 관리 불필요 +- HPA로 worker auto-scaling 가능 +- `kompose convert`로 docker-compose → k8s 변환 가능 + +### 11.3 Prometheus 멀티 인스턴스 설정 + +인스턴스가 분리되면 `prometheus.yml`에서 내부 IP 사용: + +```yaml +scrape_configs: + - job_name: "django" + static_configs: + - targets: ["10.178.0.10:8000"] # app 인스턴스 + + - job_name: "rabbitmq" + static_configs: + - targets: ["10.178.0.12:15692"] # mq 인스턴스 + + - job_name: "mysql" + static_configs: + - targets: ["10.178.0.11:9104"] # db 인스턴스 (mysqld-exporter 같이 띄움) + + - job_name: "celery" + static_configs: + - targets: ["10.178.0.13:9808"] # celery-exporter를 어디서 띄울지 결정 필요 +``` + +### 11.4 주의사항 + +- GCP 방화벽 규칙에서 모니터링 포트 (9090, 4317, 15692, 9104, 9808 등) 내부 허용 필요 +- 외부 노출하면 안 되는 포트: Prometheus (9090), Grafana (3000) → VPN 또는 IAP 터널 사용 +- 각 인스턴스에서 cAdvisor를 로컬로 띄우고, 모니터링 인스턴스의 Prometheus가 모든 cAdvisor를 scrape + +--- + +## 12. 
트러블슈팅 + +### 12.1 OTel Collector 연결 실패 + +```bash +docker logs speedcam-otel-collector +# "connection refused" → Jaeger 미기동 확인 +# "context deadline exceeded" → 네트워크 문제 +``` + +### 12.2 Grafana 데이터소스 연결 실패 + +```bash +# Grafana 컨테이너에서 직접 확인 +docker exec speedcam-grafana curl -s http://prometheus:9090/-/healthy +docker exec speedcam-grafana curl -s http://loki:3100/ready +docker exec speedcam-grafana curl -s http://jaeger:16686/ +``` + +### 12.3 Promtail 로그 수집 안됨 + +```bash +docker logs speedcam-promtail +# Docker socket 접근 권한 확인 +# container name이 speedcam-* 패턴인지 확인 +``` + +### 12.4 mysqld-exporter 인증 실패 + +```bash +docker logs speedcam-mysqld-exporter +# "Access denied" → .my.cnf의 user/password 확인 +# "no configuration found" → config.my-cnf 마운트 경로 확인 +``` diff --git a/docs/PERFORMANCE_TEST_GUIDE.md b/docs/PERFORMANCE_TEST_GUIDE.md new file mode 100644 index 00000000..12d20ff5 --- /dev/null +++ b/docs/PERFORMANCE_TEST_GUIDE.md @@ -0,0 +1,864 @@ +# 성능 테스트 가이드 + +SpeedCam IoT 백엔드의 성능, 안정성, 파이프라인 처리 능력을 검증하기 위한 종합 가이드입니다. HTTP API 부하 테스트(k6)와 IoT 파이프라인 부하 테스트(MQTT)를 다룹니다. + +--- + +## 목차 + +1. [사전 준비](#1-사전-준비) +2. [모니터링 대시보드](#2-모니터링-대시보드) +3. [HTTP API 부하 테스트 (k6)](#3-http-api-부하-테스트-k6) +4. [IoT 파이프라인 부하 테스트 (MQTT)](#4-iot-파이프라인-부하-테스트-mqtt) +5. [End-to-End 검증 체크리스트](#5-end-to-end-검증-체크리스트) +6. [트러블슈팅](#6-트러블슈팅) +7. [정리 및 종료](#7-정리-및-종료) + +--- + +## 1. 사전 준비 + +### 1.1 필요 도구 + +| 도구 | 용도 | 설치 방법 | +|------|------|----------| +| Docker | 컨테이너 실행 | https://docs.docker.com/get-docker/ | +| Docker Compose | 다중 컨테이너 관리 | Docker Desktop 포함 | +| Python 3.x | MQTT 파이프라인 테스트 | 기본 설치됨 | +| paho-mqtt | MQTT 클라이언트 | `pip install paho-mqtt` | +| curl | API 요청 테스트 | 기본 설치됨 | + +### 1.2 환경 시작 + +**중요**: `docker-compose.yml`이 `speedcam-network`를 생성하고, `docker-compose.monitoring.yml`은 이를 `external: true`로 참조합니다. 반드시 순서대로 또는 `-f` 플래그로 함께 시작하세요. 
+ +```bash +# 방법 1: 앱 + 모니터링 함께 시작 (권장) +cd docker +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d +``` + +```bash +# 방법 2: 순차 시작 +cd docker +docker compose -f docker-compose.yml up -d +docker compose -f docker-compose.monitoring.yml up -d +``` + +### 1.3 macOS 참고사항 + +- **cAdvisor는 Linux 전용**: `docker-compose.monitoring.yml`에 `profiles: [linux]`가 설정되어 있으므로 macOS에서는 자동 제외됩니다 +- **Linux에서 cAdvisor 포함**: + ```bash + docker compose -f docker-compose.yml -f docker-compose.monitoring.yml --profile linux up -d + ``` +- **권장 Docker Desktop 메모리**: 8GB 이상 + +### 1.4 서비스 상태 확인 + +```bash +# 전체 컨테이너 상태 확인 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml ps +``` + +**예상 상태**: 모든 컨테이너가 `Up` 상태 + +```bash +# Prometheus 타겟 상태 확인 +curl -s http://localhost:9090/api/v1/targets | python3 -c " +import json, sys +data = json.load(sys.stdin) +print('Prometheus Scrape Targets:') +for t in data['data']['activeTargets']: + status = '✓ UP' if t['health'] == 'up' else '✗ DOWN' + print(f\" {t['labels']['job']:20s} {t['health']:5s} {t['labels']['instance']}\") +" +``` + +**예상 출력**: +``` +Prometheus Scrape Targets: + django up main:8000 + otel-collector up otel-collector:8889 + rabbitmq up rabbitmq:15692 + mysql up mysqld-exporter:9104 + celery up celery-exporter:9808 + cadvisor up cadvisor:8080 (Linux only) +``` + +--- + +## 2. 모니터링 대시보드 + +### 2.1 접속 정보 + +| 서비스 | URL | 인증 | 용도 | +|--------|-----|------|------| +| **Grafana** | http://localhost:3000 | admin / admin | 통합 대시보드 (메트릭, 로그, 트레이스) | +| **Prometheus** | http://localhost:9090 | 없음 | 메트릭 저장소 및 PromQL 쿼리 | +| **Jaeger** | http://localhost:16686 | 없음 | 분산 트레이싱 UI | +| **RabbitMQ** | http://localhost:15672 | sa / 1234 | 큐 모니터링 | +| **Flower** | http://localhost:5555 | 없음 | Celery 태스크 모니터링 | + +### 2.2 Grafana 대시보드 Import + +시작 시 자동으로 대시보드가 프로비저닝되지만, 추가 대시보드는 수동 import: + +1. Grafana 접속: http://localhost:3000 +2. Dashboards → New → Import +3. 
Dashboard ID 입력: + +| 대시보드 | ID | 데이터소스 | 용도 | +|---------|-----|-----------|------| +| Django Prometheus | 17658 | Prometheus | HTTP 요청, 응답시간, 에러율 | +| Celery Monitoring | 17509 | Prometheus | 태스크 성공/실패, 큐 깊이 | +| RabbitMQ Overview | 10991 | Prometheus | 메시지 rate, 큐 깊이 | +| MySQL Overview | 14057 | Prometheus | 쿼리 수, 커넥션, 슬로우 쿼리 | +| K6 Load Testing | 19665 | Prometheus | k6 부하 테스트 결과 (실시간) | + +### 2.3 주요 메트릭 보기 + +**Django HTTP 메트릭** (자동 수집): +``` +http://localhost:3000/d/ +``` + +**Jaeger 트레이스** (요청 플로우 추적): +``` +http://localhost:16686 → Services → speedcam-api → 최근 트레이스 보기 +``` + +**Loki 로그** (구조화된 로그): +``` +Grafana → Explore → Data source: Loki +쿼리: {service="main"} +``` + +--- + +## 3. HTTP API 부하 테스트 (k6) + +### 3.1 테스트 대상 + +REST API 엔드포인트 검증 (IoT 파이프라인 제외): + +| 엔드포인트 | 메서드 | 용도 | +|-----------|--------|------| +| `/health/` | GET | 헬스 체크 | +| `/api/v1/vehicles/` | GET, POST, PUT, DELETE | 차량 CRUD | +| `/api/v1/detections/` | GET | 검출 목록 조회 | +| `/api/v1/notifications/` | GET | 알림 목록 조회 | + +### 3.2 설치 + +paho-mqtt는 MQTT 테스트에만 필요합니다. k6 테스트는 Docker 컨테이너에서 실행되므로 호스트 설치 불필요합니다. + +### 3.3 실행 방법 + +```bash +cd docker + +# 기본 실행: Prometheus에 결과 기록 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml \ + run k6 run --out experimental-prometheus-rw /scripts/load-test.js +``` + +**선택 사항**: 환경 변수 오버라이드 + +```bash +# 커스텀 대상 서버 지정 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml \ + run -e MAIN_SERVICE_URL=http://localhost:8000 k6 \ + run --out experimental-prometheus-rw /scripts/load-test.js +``` + +### 3.4 시나리오별 테스트 + +`load-test.js`에 3가지 시나리오가 정의되어 있습니다. 각 시나리오는 startTime이 다르므로 한 번의 실행으로 모두 테스트됩니다. 
+ +| 시나리오 | VU 범위 | 시간 | 시작 시간 | 용도 | 기대 결과 | +|----------|---------|------|----------|------|----------| +| **smoke** | 1 | 10초 | 0s | 기본 동작 확인 | 에러 0%, 응답 <100ms | +| **average_load** | 0→10→0 | ~100초 | 15s | 평균 부하 검증 | p95 <500ms, 에러 <1% | +| **spike** | 0→30→0 | ~25초 | 120s | 스파이크 처리 능력 | p99 <1000ms, 에러 <5% | + +**총 실행 시간**: ~2분 30초 + +### 3.5 실행 중 모니터링 + +실시간으로 다른 터미널에서 메트릭 확인: + +```bash +# Prometheus UI에서 확인 +open http://localhost:9090/graph +# 쿼리: rate(k6_http_reqs_total[1m]) +``` + +```bash +# Grafana K6 대시보드 (ID: 19665) 보기 +open http://localhost:3000/d/K6-dashboard +``` + +### 3.6 결과 해석 + +k6 실행 완료 후 stdout에 요약이 표시됩니다: + +``` + checks.........................: 100.00% ✓ 5000 ✗ 0 + data_received..................: 1.2 MB ✓ + data_sent.......................: 850 kB ✓ + http_req_blocked...............: avg=1.2ms min=100µs max=50ms p(90)=2.1ms p(95)=3.5ms + http_req_connecting............: avg=0.8ms min=0µs max=40ms p(90)=1.5ms p(95)=2.2ms + http_req_duration..............: avg=125ms min=50ms max=2s p(90)=350ms p(95)=450ms + http_req_failed................: 0.00% ✓ 0 ✗ 5000 + http_req_receiving.............: avg=2.5ms min=0.5ms max=20ms p(90)=4ms p(95)=5ms + http_req_sending...............: avg=0.5ms min=0.1ms max=5ms p(90)=1ms p(95)=1ms + http_req_tls_handshaking.......: avg=0ms min=0µs max=0s p(90)=0s p(95)=0s + http_req_waiting...............: avg=122ms min=48ms max=1.9s p(90)=348ms p(95)=448ms + http_reqs.......................: 5000 199.31/s + iteration_duration.............: avg=2.5s min=2s max=30s p(90)=2.8s p(95)=3.1s + iterations......................: 5000 199.31/s +``` + +**주요 메트릭**: +- `checks`: 테스트 검증 통과율 (100%이어야 함) +- `http_req_duration` (p95): 95% 요청의 응답시간 (목표 <500ms) +- `http_req_failed`: 실패율 (0%이어야 함) +- `http_reqs`: 초당 처리한 요청 수 (RPS) + +### 3.7 맞춤형 시나리오 작성 + +`load-test.js`를 수정하여 커스텀 시나리오를 추가할 수 있습니다. 자세한 내용은 [k6 공식 문서](https://k6.io/docs/get-started/running-k6/)를 참고하세요. + +--- + +## 4. 
IoT 파이프라인 부하 테스트 (MQTT) + +### 4.1 테스트 대상 + +실제 IoT 카메라 동작을 시뮬레이션하여 전체 파이프라인을 검증합니다: + +``` +MQTT 메시지 발행 (Raspberry Pi 시뮬) + ↓ +RabbitMQ 큐에 저장 + ↓ +Detection 생성 (pending) + ↓ +OCR Worker (이미지 처리) + ↓ +Alert Worker (FCM 알림) + ↓ +완료 (completed) +``` + +### 4.2 사전 준비 + +**호스트에서 실행하는 경우**: +```bash +pip install paho-mqtt +``` + +### 4.3 실행 방법 + +**기본 실행** (호스트): +```bash +python docker/k6/mqtt-load-test.py \ + --workers 5 \ + --rate 2 \ + --duration 60 +``` + +**환경 변수 오버라이드**: +```bash +MQTT_HOST=localhost MQTT_PORT=1883 python docker/k6/mqtt-load-test.py \ + --workers 5 --rate 2 --duration 60 +``` + +### 4.4 테스트 단계별 파라미터 + +| 단계 | Workers | Rate(/s) | Duration | 총 메시지 | 용도 | 예상 처리 시간 | +|------|---------|----------|----------|-----------|------|----------------| +| **Smoke** | 1 | 1 | 10s | ~10 | 기본 동작 확인 | ~30초 | +| **Load** | 5 | 2 | 60s | ~600 | 일반 부하 검증 | ~5분 | +| **Stress** | 20 | 5 | 120s | ~12,000 | 시스템 한계 확인 | ~30분 | +| **Soak** | 5 | 2 | 3600s | ~36,000 | 장시간 안정성 | ~2시간 | + +**추천 시작 순서**: +1. Smoke 테스트로 연결성 확인 +2. Load 테스트로 정상 동작 확인 +3. 
Stress 테스트로 한계 확인 + +### 4.5 메시지 형식 + +MQTT 메시지는 다음 JSON 형식으로 발행됩니다: + +```json +{ + "camera_id": "CAM-001", + "location": "서울시 강남구 테헤란로", + "detected_speed": 95.3, + "speed_limit": 60.0, + "detected_at": "2024-01-01T12:00:00+09:00", + "image_gcs_uri": "gs://speedcam-bucket/detections/1704067200000-1234.jpg" +} +``` + +**필드 설명**: +- `camera_id`: 카메라 ID (CAM-001 ~ CAM-020) +- `location`: 카메라 위치 (실제 한국 도로명) +- `detected_speed`: 감지된 속도 (제한속도 + 5~50km/h 초과) +- `speed_limit`: 해당 구간 제한속도 (60, 80, 100, 110 중 선택) +- `detected_at`: ISO 8601 형식의 감지 시간 (한국 표준시) +- `image_gcs_uri`: GCS에 저장된 이미지 경로 (시뮬레이션용 경로) + +### 4.6 실행 중 모니터링 + +테스트 실행 중 다른 터미널에서 진행 상황을 모니터링합니다: + +**RabbitMQ 큐 상태**: +```bash +curl -s -u sa:1234 http://localhost:15672/api/queues/%2F | python3 -c " +import json, sys +queues = json.load(sys.stdin) +print('RabbitMQ Queue Status:') +for q in queues: + if q['name'] in ('detections_queue', 'ocr_queue', 'fcm_queue'): + print(f\" {q['name']:20s} messages={q.get('messages', 0):6d} consumers={q.get('consumers', 0)}\") +" +``` + +**Celery 태스크 상태**: +```bash +curl -s http://localhost:5555/api/workers | python3 -c " +import json, sys +data = json.load(sys.stdin) +print('Celery Workers:') +for worker, info in data.items(): + print(f\" {worker:30s} {info.get('status', 'unknown')}\") +" +``` + +**Jaeger 트레이스 (선택)**: +```bash +open http://localhost:16686 +# Services → speedcam-api → Detection 또는 OCR 작업 선택 +``` + +### 4.7 결과 확인 + +MQTT 테스트 완료 후 stdout에 통계가 표시됩니다: + +``` +=== MQTT Load Test Complete === +Total Published: 600 +Failed: 0 +Success Rate: 100.00% +Avg Latency: 245ms +Min Latency: 50ms +Max Latency: 1200ms +Total Duration: 65 seconds +Messages/sec: 9.23 +``` + +**해석**: +- **Success Rate**: 100%이어야 함 (메시지 발행 성공) +- **Avg Latency**: MQTT 발행 시간 (네트워크 지연) +- **Total Duration**: 부하 테스트 총 소요 시간 + +--- + +## 5. End-to-End 검증 체크리스트 + +MQTT 부하 테스트 실행 후 다음 항목들을 확인하여 파이프라인이 정상 동작하는지 검증합니다. 
+
+### 5.1 Detection 처리 상태
+
+```bash
+# 전체 Detection 조회
+curl -s http://localhost:8000/api/v1/detections/ | python3 -c "
+import json, sys
+data = json.load(sys.stdin)
+print(f\"Total Detections: {data['count']}\")
+print()
+
+# 상태별 카운트 추출
+results = data['results']
+status_counts = {}
+for r in results:
+    status = r.get('ocr_status', 'unknown')
+    status_counts[status] = status_counts.get(status, 0) + 1
+
+print('Status Distribution:')
+for status, count in sorted(status_counts.items()):
+    print(f\"  {status:15s}: {count:4d}\")
+"
+```
+
+**정상 상태**:
+- 대부분이 `completed` 상태
+- 일부 `processing` 또는 `pending` (최근 생성된 건)
+- `failed` 건이 있으면 OCR Worker 로그 확인: `docker logs speedcam-ocr`
+
+```bash
+# 최근 생성된 Detection 확인
+curl -s "http://localhost:8000/api/v1/detections/?ordering=-detected_at&limit=5" | \
+  python3 -m json.tool | head -50
+```
+
+### 5.2 Jaeger 분산 트레이스 확인
+
+트레이스를 통해 요청이 전체 시스템을 거치는 과정을 추적합니다.
+
+```bash
+# 사용 가능한 서비스 확인
+curl -s http://localhost:16686/api/services | python3 -c "
+import json, sys
+data = json.load(sys.stdin)
+print('Jaeger Services:')
+for service in data['data']:
+    print(f\"  - {service}\")
+"
+```
+
+**예상 서비스**:
+- `speedcam-api`: Django 메인 애플리케이션
+- `speedcam-ocr`: OCR Worker (Celery)
+- `speedcam-alert`: Alert Worker (Celery)
+
+```bash
+# 최근 트레이스 조회 (speedcam-api)
+curl -s "http://localhost:16686/api/traces?service=speedcam-api&limit=3" | python3 -c "
+import json, sys
+data = json.load(sys.stdin)
+print('Recent Traces (speedcam-api):')
+for trace in data['data'][:3]:
+    trace_id = trace['traceID'][:16]
+    num_spans = len(trace['spans'])
+    operation = trace['spans'][0]['operationName']
+    # Jaeger API span에는 endTime 필드가 없음 — startTime/duration(마이크로초 단위)만 제공됨
+    duration_ms = trace['spans'][0]['duration'] / 1000
+    print(f\"  {trace_id}... | Spans: {num_spans:2d} | {operation:30s} | {duration_ms:6.1f}ms\")
+"
+```
+
+**정상 구성**:
+- Health Check: 1-2 spans (빠름)
+- Vehicle Create: 3-5 spans (DB 쿼리 포함)
+- Detection Create: 5-10 spans (MQTT, RabbitMQ, DB)
+- OCR Task: 7-15 spans (GCS, API, DB)
+
+### 5.3 Loki 로그 확인
+
+구조화된 로그를 통해 각 컴포넌트의 동작을 확인합니다.
+
+```bash
+# Loki에서 수집된 로그 스트림 확인
+curl -sG http://localhost:3100/loki/api/v1/labels | python3 -c "
+import json, sys
+data = json.load(sys.stdin)
+print('Loki Labels:')
+print(f\"  Available labels: {', '.join(data['data'][:5])}...\")
+"
+```
+
+```bash
+# speedcam 컨테이너의 최근 로그 (Loki)
+curl -sG "http://localhost:3100/loki/api/v1/query" \
+  --data-urlencode 'query={container=~"speedcam.*"}' \
+  --data-urlencode 'limit=10' | python3 -c "
+import json, sys
+data = json.load(sys.stdin)
+streams = data['data']['result']
+print(f'Log Streams Found: {len(streams)}')
+for stream in streams[:3]:
+    container = stream['stream'].get('container', 'unknown')
+    num_entries = len(stream['values'])
+    print(f\"  {container:30s}: {num_entries} log entries\")
+"
+```
+
+**Grafana UI에서 로그 보기**:
+1. Grafana → Explore → Loki
+2. 쿼리: `{container=~"speedcam.*"}`
+3. 각 로그 라인의 `trace_id=` 클릭 → Jaeger 트레이스 자동 이동
+
+### 5.4 RabbitMQ 큐 상태
+
+MQTT 메시지 처리 파이프라인의 큐 상태를 확인합니다.
+ +```bash +# 큐별 메시지 수 확인 +curl -s -u sa:1234 http://localhost:15672/api/queues/%2F | python3 -c " +import json, sys +queues = json.load(sys.stdin) +print('RabbitMQ Queue Status:') +print(f\"{'Queue Name':<20} {'Messages':>10} {'Consumers':>10} {'Ready':>10} {'Unacked':>10}\") +print('-' * 60) +for q in queues: + if q['name'] in ('detections_queue', 'ocr_queue', 'fcm_queue', 'dlq_queue'): + name = q['name'] + msgs = q.get('messages', 0) + consumers = q.get('consumers', 0) + ready = q.get('messages_ready', 0) + unacked = q.get('messages_unacknowledged', 0) + print(f'{name:<20} {msgs:>10} {consumers:>10} {ready:>10} {unacked:>10}') +" +``` + +**정상 상태**: +- **detections_queue**: 0 (Detection 생성 후 즉시 처리) +- **ocr_queue**: 0-10 (처리 중) +- **fcm_queue**: 0-5 (처리 중) +- **dlq_queue**: 0 (에러 없음) +- **consumers**: 각 큐당 1 이상 (worker가 리스닝 중) + +### 5.5 Prometheus 메트릭 확인 + +시스템 성능 메트릭을 Prometheus PromQL로 확인합니다. + +```bash +# Django HTTP 요청 메트릭 +curl -s http://localhost:9090/api/v1/query --data-urlencode \ + 'query=rate(django_http_requests_total[5m])' | python3 -c " +import json, sys +data = json.load(sys.stdin) +result = data['data']['result'] +if result: + print(f'Django HTTP Request Rate: {len(result)} series found') + print(f' Current RPS: {float(result[0][\"value\"][1]):.1f}') +else: + print('No Django metrics found') +" +``` + +```bash +# Celery 태스크 메트릭 +curl -s http://localhost:9090/api/v1/query --data-urlencode \ + 'query=rate(celery_tasks_total[5m])' | python3 -c " +import json, sys +data = json.load(sys.stdin) +result = data['data']['result'] +if result: + print(f'Celery Task Rate: {len(result)} series found') + for r in result[:3]: + state = r['metric'].get('state', 'unknown') + rate = float(r['value'][1]) + print(f\" {state:10s}: {rate:.1f} tasks/sec\") +else: + print('No Celery metrics found') +" +``` + +### 5.6 DB 성능 메트릭 + +```bash +# MySQL 활성 커넥션 수 +curl -s http://localhost:9090/api/v1/query --data-urlencode \ + 'query=mysql_global_status_threads_connected' | 
python3 -c " +import json, sys +data = json.load(sys.stdin) +result = data['data']['result'] +if result: + value = float(result[0]['value'][1]) + print(f'Active MySQL Connections: {int(value)}') +else: + print('No MySQL metrics found') +" +``` + +### 5.7 컨테이너 리소스 사용률 + +```bash +# 각 컨테이너 CPU 사용률 (%) - cAdvisor 필요 +curl -s http://localhost:9090/api/v1/query --data-urlencode \ + 'query=rate(container_cpu_usage_seconds_total{name=~"speedcam-.*"}[5m])*100' | python3 -c " +import json, sys +data = json.load(sys.stdin) +result = data['data']['result'] +if result: + print('Container CPU Usage (%):') + for r in result[:5]: + name = r['metric'].get('name', 'unknown') + cpu_usage = float(r['value'][1]) + print(f\" {name:30s}: {cpu_usage:6.2f}%\") +else: + print('No cAdvisor metrics found (Linux only)') +" +``` + +```bash +# 각 컨테이너 메모리 사용량 (MB) - cAdvisor 필요 +curl -s http://localhost:9090/api/v1/query --data-urlencode \ + 'query=container_memory_usage_bytes{name=~"speedcam-.*"}/1024/1024' | python3 -c " +import json, sys +data = json.load(sys.stdin) +result = data['data']['result'] +if result: + print('Container Memory Usage (MB):') + for r in result[:5]: + name = r['metric'].get('name', 'unknown') + memory_mb = float(r['value'][1]) + print(f\" {name:30s}: {memory_mb:7.1f} MB\") +else: + print('No cAdvisor metrics found (Linux only)') +" +``` + +--- + +## 6. 트러블슈팅 + +### 6.1 docker-compose 실행 오류 + +**오류**: `network speedcam-network not found` + +**원인**: 모니터링 스택만 단독으로 시작함 + +**해결**: +```bash +# 앱 스택을 먼저 시작 +docker compose -f docker-compose.yml up -d + +# 그 다음 모니터링 추가 +docker compose -f docker-compose.monitoring.yml up -d + +# 또는 함께 시작 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d +``` + +### 6.2 Prometheus 타겟이 DOWN + +**오류**: Prometheus 대시보드에서 일부 타겟이 DOWN 상태 + +**celery-exporter가 재시작되는 경우**: +- **원인**: RabbitMQ보다 먼저 시작되어 broker 연결 실패 +- **해결**: 자동 복구됨 (`restart: unless-stopped`). 
30초 기다린 후 확인 + +**mysqld-exporter가 DOWN**: +- **원인**: MySQL보다 먼저 시작됨 +- **해결**: 자동 복구됨. 30초 기다린 후 확인 + +**django가 DOWN**: +- **원인**: 앱 시작 실패 +- **해결**: + ```bash + docker logs speedcam-main + ``` + +### 6.3 cAdvisor 시작 실패 (macOS) + +**오류**: `cadvisor: error setting oom score: open /proc/.../oom_score_adj: no such file or directory` + +**원인**: cAdvisor는 Linux 전용이며 /proc 파일시스템 필요 + +**해결**: 예상 동작. macOS에서는 자동으로 제외됨 (`profiles: [linux]`). Linux에서만 실행하세요. + +### 6.4 Jaeger에 트레이스가 없음 + +**오류**: Jaeger UI에서 데이터가 보이지 않음 + +**원인**: OTEL_EXPORTER_OTLP_ENDPOINT 미설정 + +**해결**: +```bash +# backend.env 확인 +cat docker/backend.env | grep OTEL_EXPORTER_OTLP_ENDPOINT + +# 없으면 추가 +echo 'OTEL_EXPORTER_OTLP_ENDPOINT=http://otel-collector:4317' >> docker/backend.env + +# 앱 재시작 +docker compose -f docker-compose.yml up -d --force-recreate speedcam-main +``` + +### 6.5 Loki 429 Too Many Requests + +**오류**: Loki 쿼리 실패 with 429 status + +**원인**: 로그 스트림이 너무 많음 (한계 초과) + +**해결**: +```bash +# loki-config.yml에서 한계 증가 +# docker/monitoring/loki/loki-config.yml 수정 +# limits_config: +# max_global_streams_per_user: 20000 # 기본값 10000에서 증가 +``` + +### 6.6 환경 변수 변경 후 반영 안됨 + +**오류**: backend.env 변경 후 앱에 반영 안됨 + +**원인**: Docker restart는 env_file을 다시 읽지 않음 + +**해결**: +```bash +# --force-recreate 사용 +docker compose -f docker-compose.yml up -d --force-recreate speedcam-main +``` + +### 6.7 MQTT 연결 실패 + +**오류**: `python mqtt-load-test.py` 실행 시 연결 실패 + +**원인**: RabbitMQ MQTT 플러그인 미활성화 + +**확인**: +```bash +docker logs speedcam-rabbitmq | grep -i mqtt +# "MQTT plugin loaded" 메시지가 있어야 함 +``` + +**해결** (이미 자동 활성화됨): +docker-compose.yml의 rabbitmq command에 `rabbitmq_mqtt` 플러그인이 포함되어 있는지 확인 + +### 6.8 k6 테스트 타임아웃 + +**오류**: k6 테스트 중 `dial tcp: lookup main: no such host` + +**원인**: k6 컨테이너가 speedcam-network에 연결되지 않음 + +**해결**: docker-compose.monitoring.yml에서 k6 서비스가 올바른 네트워크 설정이 있는지 확인 + +```yaml +networks: + - speedcam-network # speedcam-network 참조 +``` + +--- + +## 7. 
정리 및 종료 + +### 7.1 전체 종료 및 데이터 제거 + +```bash +cd docker + +# 컨테이너 + 볼륨 완전 제거 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml down -v +``` + +### 7.2 모니터링 데이터만 삭제 + +런타임 데이터(Prometheus, Grafana, Loki)를 초기화합니다: + +```bash +rm -rf docker/monitoring/prometheus/data \ + docker/monitoring/loki/data \ + docker/monitoring/grafana/data +``` + +다시 시작하면 초기 상태로 복구됩니다. + +### 7.3 모니터링 스택만 종료 (앱 유지) + +앱은 계속 실행하고 모니터링만 종료: + +```bash +cd docker + +docker compose -f docker-compose.monitoring.yml down +``` + +나중에 모니터링을 다시 시작: + +```bash +docker compose -f docker-compose.monitoring.yml up -d +``` + +### 7.4 특정 컨테이너만 종료 + +```bash +# 개별 서비스 종료 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml \ + stop speedcam-main speedcam-ocr + +# 개별 서비스 재시작 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml \ + restart speedcam-main +``` + +--- + +## 8. 추가 자료 + +### 8.1 관련 문서 + +- [모니터링 스택 가이드](./MONITORING.md): 아키텍처, 설정, 메트릭 상세 설명 +- [배포 가이드](./DEPLOYMENT.md): GCP 멀티 인스턴스 배포 방법 +- [아키텍처 비교](./ARCHITECTURE_COMPARISON.md): 시스템 설계 이유 + +### 8.2 외부 자료 + +- [k6 공식 문서](https://k6.io/docs/) +- [Prometheus PromQL](https://prometheus.io/docs/prometheus/latest/querying/basics/) +- [Grafana 대시보드](https://grafana.com/grafana/dashboards/) +- [Jaeger 분산 트레이싱](https://www.jaegertracing.io/docs/) +- [OpenTelemetry Python](https://opentelemetry.io/docs/instrumentation/python/) + +### 8.3 자주 사용하는 명령어 + +```bash +# 모니터링 스택 전체 확인 +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml ps + +# 로그 실시간 추적 +docker logs -f speedcam-main +docker logs -f speedcam-ocr +docker logs -f speedcam-alert + +# 모니터링 데이터 초기화 후 재시작 +rm -rf docker/monitoring/*/data +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d + +# Prometheus 메트릭 직접 조회 +curl -s http://localhost:9090/api/v1/query --data-urlencode 'query=' + +# RabbitMQ 큐 확인 +curl -s -u sa:1234 http://localhost:15672/api/queues/%2F + +# Jaeger 서비스 확인 +curl -s 
http://localhost:16686/api/services + +# Docker 디스크 정리 (주의: 사용하지 않는 모든 이미지/볼륨 제거) +docker system prune -a --volumes +``` + +--- + +## 9. FAQ + +**Q: k6과 MQTT 테스트 중 어느 것을 먼저 실행해야 하나요?** + +A: k6을 먼저 실행하세요. k6은 REST API만 테스트하므로 (순수 읽기 작업) 데이터베이스 상태에 영향을 주지 않습니다. MQTT 테스트는 실제 Detection을 생성하므로 나중에 실행하는 것이 좋습니다. + +**Q: 부하 테스트 중 시스템이 느려집니다. 어떻게 해야 하나요?** + +A: 정상입니다. 먼저 메트릭을 확인하세요: +1. Prometheus에서 CPU/메모리 사용률 확인 +2. RabbitMQ 큐 깊이 확인 (메시지 밀림) +3. MySQL 커넥션 풀 상태 확인 +4. 필요하면 docker-compose.yml의 리소스 제한(`resources`) 조정 + +**Q: 테스트 결과를 저장하고 싶습니다.** + +A: k6은 자동으로 Prometheus에 메트릭을 기록합니다. Prometheus → Export로 데이터를 JSON/CSV로 내보낼 수 있습니다. MQTT 테스트의 경우 stdout을 파일로 리다이렉트합니다: +```bash +python docker/k6/mqtt-load-test.py ... > test_results.txt +``` + +**Q: 모니터링 없이 성능 테스트를 실행할 수 있나요?** + +A: 가능합니다. k6 또는 MQTT 테스트 스크립트는 독립적으로 실행할 수 있습니다. 하지만 모니터링 없으면 결과를 측정하고 분석하기 어렵습니다. + +**Q: 프로덕션 환경에서 어떻게 테스트하나요?** + +A: 이 가이드는 로컬/개발 환경 기준입니다. 프로덕션 배포는 [GCP 멀티 인스턴스 배포 가이드](./DEPLOYMENT.md)를 참고하세요. 프로덕션에서는: +1. 전용 모니터링 인스턴스 사용 +2. Prometheus 보안 설정 (인증, TLS) +3. 백그라운드에서 정기적인 스모크 테스트 실행 +4. 알림 규칙(Alert) 설정 + +--- + +마지막 업데이트: 2024년 1월 diff --git a/docs/PRD.md b/docs/PRD.md new file mode 100644 index 00000000..faf015e3 --- /dev/null +++ b/docs/PRD.md @@ -0,0 +1,1645 @@ +# 과속 차량 감지 및 알림 시스템 PRD + +## 1. 프로젝트 개요 + +### 1.1 목적 +라즈베리파이 기반 엣지 디바이스에서 과속 차량을 감지하고, 번호판 OCR 인식 후 차량 소유자에게 실시간 푸시 알림을 전송하는 시스템 + +### 1.2 핵심 기능 +- 과속 차량 이미지 수집 및 저장 (GCS) +- 번호판 OCR 인식 (EasyOCR) +- FCM 푸시 알림 전송 +- 위반 내역 조회 API + +--- + +## 2. 
시스템 아키텍처 + +### 2.1 아키텍처 패턴 +- **Event-Driven Microservices (Choreography Pattern)** +- 각 서비스가 자율적으로 자신의 DB를 업데이트하고 다음 이벤트를 발행 +- **서비스별 독립 데이터베이스** (Database per Service) + +### 2.2 인스턴스 배포 구조 + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ GCP Infrastructure │ +├─────────────────────────────────────────────────────────────────────────────┤ +│ │ +│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Main Instance │ │ OCR Instance │ │ Alert Instance │ │ +│ │ (Django) │ │ (Celery Worker) │ │ (Celery Worker) │ │ +│ │ │ │ │ │ │ │ +│ │ - API Server │ │ - OCR Task │ │ - FCM Task │ │ +│ │ - MQTT Sub │ │ - GCS Download │ │ - Push Notify │ │ +│ │ - Task Dispatch │ │ - DB Update │ │ - DB Update │ │ +│ └────────┬────────┘ └────────┬────────┘ └────────┬────────┘ │ +│ │ │ │ │ +│ └──────────────────────┼──────────────────────┘ │ +│ │ │ +│ ┌────────────▼────────────┐ │ +│ │ RabbitMQ Instance │ │ +│ │ (Message Broker) │ │ +│ │ - MQTT Plugin │ │ +│ │ - AMQP Queues │ │ +│ └────────────┬────────────┘ │ +│ │ │ +│ ┌───────────────────────────────┼───────────────────────────────┐ │ +│ │ Cloud SQL (MySQL) - Multi-Database │ │ +│ │ ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────────┐ │ │ +│ │ │ speedcam │ │ vehicles │ │detections │ │notifications │ │ │ +│ │ │ (default) │ │ _db │ │ _db │ │ _db │ │ │ +│ │ └───────────┘ └───────────┘ └───────────┘ └───────────────┘ │ │ +│ └───────────────────────────────────────────────────────────────┘ │ +│ │ +└─────────────────────────────────────────────────────────────────────────────┘ +``` + +### 2.3 아키텍처 다이어그램 + +```mermaid +graph TB + subgraph Edge["Edge Device"] + Pi[Raspberry Pi] + end + + subgraph GCP["Google Cloud Platform"] + GCS[(Cloud Storage)] + + subgraph RMQ["RabbitMQ Instance"] + MQTT[MQTT Plugin
Port 1883] + AMQP[AMQP Broker
Port 5672] + end + + subgraph Main["Main Instance"] + Django[Django App
Ingestion & API] + end + + subgraph OCRInst["OCR Instance"] + OCR[OCR Worker
Celery prefork] + end + + subgraph AlertInst["Alert Instance"] + FCM[Notification Worker
Celery gevent] + end + + subgraph DBCluster["Cloud SQL Cluster"] + MySQL_Default[(speedcam
Django Core)] + MySQL_Vehicles[(speedcam_vehicles
Vehicles)] + MySQL_Detections[(speedcam_detections
Detections)] + MySQL_Notifications[(speedcam_notifications
Notifications)] + end + end + + subgraph External["External Services"] + Firebase[Firebase FCM] + end + + Pi -->|1. 이미지 업로드| GCS + Pi -->|2. MQTT Publish| MQTT + MQTT -->|3. MQTT Subscribe| Django + Django -->|4. pending 레코드| MySQL_Detections + Django -->|5. AMQP Publish| AMQP + AMQP -->|ocr_queue| OCR + OCR -->|6. 이미지 다운로드| GCS + OCR -->|7. 결과 업데이트| MySQL_Detections + OCR -->|7-1. 차량 조회| MySQL_Vehicles + OCR -->|8. AMQP Publish| AMQP + AMQP -->|fcm_queue| FCM + FCM -->|9. 차량/토큰 조회| MySQL_Vehicles + FCM -->|10. 푸시 전송| Firebase + FCM -->|11. 이력 저장| MySQL_Notifications +``` + +### 2.4 이벤트 흐름 (Sequence Diagram) + +```mermaid +sequenceDiagram + participant Pi as Raspberry Pi + participant GCS as Cloud Storage + participant MQTT as RabbitMQ MQTT + participant Django as Main Service + participant AMQP as RabbitMQ AMQP + participant OCR as OCR Service + participant FCM as Alert Service + participant VDB as vehicles_db + participant DDB as detections_db + participant NDB as notifications_db + + Note over Pi: 과속 차량 감지 + Pi->>GCS: 1. 이미지 업로드 + Pi->>MQTT: 2. MQTT Publish (detections/new) + + MQTT->>Django: 3. MQTT Subscribe + Django->>DDB: 4. Detection 생성 (status=pending) + Django->>AMQP: 5. Publish to ocr_exchange (Direct) + + AMQP->>OCR: 6. Consume from ocr_queue + OCR->>GCS: 7. 이미지 다운로드 + OCR->>OCR: 8. EasyOCR 실행 + OCR->>DDB: 9. 직접 업데이트 (status=completed) + OCR->>VDB: 10. 번호판으로 Vehicle 조회 + alt 차량 & FCM 토큰 존재 + OCR->>DDB: 11. vehicle_id 매핑 + OCR->>AMQP: 12. Publish to fcm_exchange (Direct) + end + + AMQP->>FCM: 13. Consume from fcm_queue + FCM->>DDB: 14. Detection 조회 + FCM->>VDB: 15. Vehicle/FCM 토큰 조회 + FCM->>FCM: 16. FCM API 호출 + FCM->>NDB: 17. 알림 이력 저장 +``` + +--- + +## 3. 
기술 스택 + +### 3.1 Backend +| 구분 | 기술 | 버전 | +|------|------|------| +| Language | Python | 3.12+ | +| Framework | Django | 5.1.7 | +| API | Django REST Framework | 3.15.2 | +| WSGI Server | Gunicorn | 23.0.0 | +| Task Queue | Celery | 5.5.2 | +| Message Broker | RabbitMQ | 3.13+ | + +### 3.2 Database & Storage +| 구분 | 기술 | 버전 | +|------|------|------| +| RDBMS | MySQL | 8.0 | +| MySQL Connector | PyMySQL | 1.1.1 | +| Object Storage | Google Cloud Storage | 2.18.2 | +| Push Notification | Firebase Admin SDK | 6.8.0 | + +### 3.3 OCR & Image Processing +| 구분 | 기술 | 버전 | +|------|------|------| +| OCR Engine | EasyOCR | 1.7.2 | +| Image Processing | OpenCV | 4.10.0.84 | +| Image Library | Pillow | 11.2.1 | + +### 3.4 Monitoring (Optional) +| 구분 | 기술 | 용도 | +|------|------|------| +| Task Monitoring | Flower | Celery Task 모니터링 | +| Queue Dashboard | RabbitMQ Management | Queue 상태 확인 | + +--- + +## 4. MSA 데이터베이스 설계 + +### 4.1 Database per Service Pattern + +MSA 환경에서 각 서비스는 **독립적인 데이터베이스**를 사용하여 느슨한 결합을 유지합니다. 
+ +| 서비스 | 데이터베이스 | 용도 | +|--------|-------------|------| +| Django Core | `speedcam` | Auth, Admin, Sessions, Celery Results | +| Vehicles Service | `speedcam_vehicles` | 차량 정보, FCM 토큰 | +| Detections Service | `speedcam_detections` | 과속 감지 내역, OCR 결과 | +| Notifications Service | `speedcam_notifications` | 알림 전송 이력 | + +### 4.2 Cross-Service Reference + +MSA에서 서비스 간 데이터 참조는 **Foreign Key 대신 ID 참조**를 사용합니다: + +``` +┌─────────────────┐ ID Reference ┌─────────────────┐ +│ vehicles_db │ ◄──────────────────── │ detections_db │ +│ │ vehicle_id │ │ +│ Vehicle │ │ Detection │ +│ - id (PK) │ │ - id (PK) │ +│ - plate_number│ │ - vehicle_id │ +│ - fcm_token │ │ - status │ +└─────────────────┘ └─────────────────┘ + │ + ID Reference + detection_id + │ + ┌────────▼────────┐ + │notifications_db │ + │ │ + │ Notification │ + │ - id (PK) │ + │ - detection_id│ + │ - status │ + └─────────────────┘ +``` + +### 4.3 Database Router + +Django의 Database Router를 사용하여 자동으로 적절한 데이터베이스로 라우팅합니다: + +```python +# config/db_router.py +class AppRouter: + """서비스별 데이터베이스 라우팅""" + + route_app_labels = { + 'vehicles': 'vehicles_db', + 'detections': 'detections_db', + 'notifications': 'notifications_db', + } + + def db_for_read(self, model, **hints): + if model._meta.app_label in self.route_app_labels: + return self.route_app_labels[model._meta.app_label] + return 'default' + + def db_for_write(self, model, **hints): + if model._meta.app_label in self.route_app_labels: + return self.route_app_labels[model._meta.app_label] + return 'default' + + def allow_relation(self, obj1, obj2, **hints): + # MSA: 다른 DB 간 FK 관계 불허 + return False + + def allow_migrate(self, db, app_label, model_name=None, **hints): + if app_label in self.route_app_labels: + return db == self.route_app_labels[app_label] + return db == 'default' +``` + +### 4.4 ER Diagram (Updated) + +```mermaid +erDiagram + %% vehicles_db + vehicles { + bigint id PK + varchar plate_number UK "번호판" + varchar owner_name "소유자명" + varchar owner_phone "연락처" 
+ varchar fcm_token "FCM 토큰" + datetime created_at + datetime updated_at + } + + %% detections_db + detections { + bigint id PK + bigint vehicle_id "차량 ID (Reference)" + float detected_speed "감지 속도" + float speed_limit "제한 속도" + varchar location "위치" + varchar camera_id "카메라 ID" + varchar image_gcs_uri "GCS 이미지 경로" + varchar ocr_result "OCR 결과" + float ocr_confidence "OCR 신뢰도" + datetime detected_at "감지 시간" + datetime processed_at "처리 완료 시간" + enum status "pending|processing|completed|failed" + text error_message "에러 메시지" + datetime created_at + datetime updated_at + } + + %% notifications_db + notifications { + bigint id PK + bigint detection_id "감지 ID (Reference)" + varchar fcm_token "FCM 토큰" + varchar title "알림 제목" + text body "알림 내용" + datetime sent_at "전송 시간" + enum status "pending|sent|failed" + int retry_count "재시도 횟수" + text error_message "에러 메시지" + datetime created_at + } +``` + +### 4.5 DDL (Updated for MSA) + +```sql +-- ============================================= +-- Database: speedcam_vehicles +-- ============================================= +CREATE DATABASE IF NOT EXISTS speedcam_vehicles; +USE speedcam_vehicles; + +CREATE TABLE vehicles ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + plate_number VARCHAR(20) NOT NULL UNIQUE, + owner_name VARCHAR(100), + owner_phone VARCHAR(20), + fcm_token VARCHAR(255), + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + INDEX idx_plate_number (plate_number), + INDEX idx_fcm_token (fcm_token) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; + +-- ============================================= +-- Database: speedcam_detections +-- ============================================= +CREATE DATABASE IF NOT EXISTS speedcam_detections; +USE speedcam_detections; + +CREATE TABLE detections ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + vehicle_id BIGINT, -- ID Reference (No FK) + detected_speed FLOAT NOT NULL, + speed_limit FLOAT NOT NULL 
DEFAULT 60.0, + location VARCHAR(255), + camera_id VARCHAR(50), + image_gcs_uri VARCHAR(500) NOT NULL, + ocr_result VARCHAR(20), + ocr_confidence FLOAT, + detected_at DATETIME NOT NULL, + processed_at DATETIME, + status ENUM('pending', 'processing', 'completed', 'failed') DEFAULT 'pending', + error_message TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + INDEX idx_vehicle_id (vehicle_id), + INDEX idx_detected_at (detected_at), + INDEX idx_status_created (status, created_at), + INDEX idx_camera_detected (camera_id, detected_at) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; + +-- ============================================= +-- Database: speedcam_notifications +-- ============================================= +CREATE DATABASE IF NOT EXISTS speedcam_notifications; +USE speedcam_notifications; + +CREATE TABLE notifications ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + detection_id BIGINT NOT NULL, -- ID Reference (No FK) + fcm_token VARCHAR(255), + title VARCHAR(255), + body TEXT, + sent_at DATETIME, + status ENUM('pending', 'sent', 'failed') DEFAULT 'pending', + retry_count INT DEFAULT 0, + error_message TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + INDEX idx_detection_id (detection_id), + INDEX idx_status_retry (status, retry_count), + INDEX idx_sent_at (sent_at) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; +``` + +--- + +## 5. 
RabbitMQ 메시징 설계 + +### 5.1 프로토콜 활용 전략 + +```mermaid +graph LR + subgraph MQTT["MQTT (Port 1883)"] + direction TB + M1[Raspberry Pi] -->|Publish| M2[detections/new] + M2 -->|Subscribe| M3[Django] + end + + subgraph AMQP["AMQP (Port 5672)"] + direction TB + A1[Django] -->|Publish| A2[ocr_exchange] + A2 -->|Route| A3[ocr_queue] + A3 -->|Consume| A4[OCR Worker] + + A4 -->|Publish| A5[fcm_exchange] + A5 -->|Route| A6[fcm_queue] + A6 -->|Consume| A7[Alert Worker] + end +``` + +| 프로토콜 | 용도 | 특징 | +|----------|------|------| +| **MQTT** | Raspberry Pi → Django | 경량 프로토콜, IoT 디바이스에 적합, QoS 1 | +| **AMQP** | Django ↔ Celery Workers | 안정적인 메시지 전달, Exchange/Queue 라우팅 | + +### 5.2 Exchange 설계 + +| Exchange | Type | Routing Key | 용도 | +|----------|------|-------------|------| +| `ocr_exchange` | **Direct** | `ocr` | OCR Task 라우팅 | +| `fcm_exchange` | **Direct** | `fcm` | 알림 Task 라우팅 | +| `dlq_exchange` | **Fanout** | - | Dead Letter 처리 | + +**Direct Exchange 선택 이유:** +- 1:1 라우팅으로 명확한 Task 분배 +- Routing Key 기반 정확한 Queue 매핑 +- Topic Exchange보다 단순하고 오버헤드 적음 + +### 5.3 Queue 설계 + +```python +# RabbitMQ Queue 설정 +QUEUES = { + 'ocr_queue': { + 'exchange': 'ocr_exchange', + 'exchange_type': 'direct', + 'routing_key': 'ocr', + 'durable': True, + 'arguments': { + 'x-dead-letter-exchange': 'dlq_exchange', + 'x-dead-letter-routing-key': 'dlq', + 'x-message-ttl': 3600000, # 1시간 + 'x-max-priority': 10, + } + }, + 'fcm_queue': { + 'exchange': 'fcm_exchange', + 'exchange_type': 'direct', + 'routing_key': 'fcm', + 'durable': True, + 'arguments': { + 'x-dead-letter-exchange': 'dlq_exchange', + 'x-dead-letter-routing-key': 'dlq', + 'x-message-ttl': 3600000, # 1시간 + } + }, + 'dlq_queue': { + 'exchange': 'dlq_exchange', + 'exchange_type': 'fanout', + 'routing_key': '', + 'durable': True, + } +} +``` + +### 5.4 Queue 설정 상세 + +| Queue | Durable | TTL | Max Priority | DLQ | Prefetch | +|-------|---------|-----|--------------|-----|----------| +| `ocr_queue` | ✅ | 1시간 | 10 | ✅ | 1 | +| `fcm_queue` | ✅ 
| 1시간 | - | ✅ | 10 | +| `dlq_queue` | ✅ | - | - | - | 1 | + +**Prefetch 설정 이유:** +- `ocr_queue`: 1 (CPU 집약적, 한 번에 하나씩 처리) +- `fcm_queue`: 10 (I/O 대기 시간 활용) + +### 5.5 메시지 흐름 + +``` +[Raspberry Pi] + │ + │ MQTT Publish + │ Topic: detections/new + │ QoS: 1 + ▼ +[RabbitMQ MQTT Plugin] + │ + │ 내부 변환 (MQTT → AMQP) + │ Exchange: amq.topic + │ Routing Key: detections.new + ▼ +[Django MQTT Subscriber] + │ + │ 메시지 수신 & 처리 + │ Detection 생성 (detections_db) + │ + │ AMQP Publish + │ Exchange: ocr_exchange + │ Routing Key: ocr + ▼ +[ocr_queue] + │ + │ Consumer: OCR Worker + ▼ +[OCR Worker] + │ + │ 처리 완료 + │ Detection 업데이트 (detections_db) + │ Vehicle 조회 (vehicles_db) + │ + │ AMQP Publish + │ Exchange: fcm_exchange + │ Routing Key: fcm + ▼ +[fcm_queue] + │ + │ Consumer: Alert Worker + ▼ +[Alert Worker] + │ + │ Detection 조회 (detections_db) + │ Vehicle 조회 (vehicles_db) + │ FCM 전송 완료 + │ Notification 저장 (notifications_db) + ▼ +[End] +``` + +--- + +## 6. Trade-off 분석 + +### 6.1 Choreography vs Orchestration + +| 항목 | Choreography (선택) | Orchestration | +|------|---------------------|---------------| +| **구조** | 각 서비스가 자율적으로 동작 | 중앙 Orchestrator가 제어 | +| **결합도** | 느슨한 결합 ✅ | 강한 결합 | +| **확장성** | 서비스별 독립 확장 ✅ | Orchestrator 병목 가능 | +| **장애 격리** | 한 서비스 장애가 전체에 영향 적음 ✅ | 중앙 장애 시 전체 중단 | +| **디버깅** | 흐름 추적 어려움 | 중앙에서 추적 용이 | +| **복잡도** | 이벤트 설계 복잡 | 로직 집중 관리 | + +**선택 이유:** +- 각 인스턴스(Main, OCR, Alert)가 독립적으로 배포/확장 +- OCR Worker가 직접 DB 업데이트 → 지연 시간 감소 +- 서비스 간 느슨한 결합으로 장애 격리 + +### 6.2 RabbitMQ vs Google Cloud Pub/Sub + +| 항목 | RabbitMQ (선택) | Cloud Pub/Sub | +|------|-----------------|---------------| +| **MQTT 지원** | Plugin으로 지원 ✅ | 미지원 (별도 브릿지 필요) | +| **지연 시간** | 낮음 (VPC 내부) ✅ | 상대적으로 높음 | +| **비용** | 인스턴스 비용만 ✅ | 메시지 수 기반 과금 | +| **Exchange 라우팅** | 유연한 라우팅 ✅ | 단순 Topic 기반 | +| **Priority Queue** | 지원 ✅ | 미지원 | +| **관리 부담** | 직접 운영 필요 | 완전 관리형 | +| **확장성** | 클러스터링 필요 | 자동 확장 | + +**선택 이유:** +- Raspberry Pi가 MQTT 프로토콜 사용 → RabbitMQ MQTT Plugin 활용 +- Priority Queue로 긴급 이벤트 우선 처리 +- 
Exchange 기반 유연한 라우팅 +- VPC 내부 통신으로 낮은 지연 시간 + +### 6.3 prefork vs gevent Pool + +| 항목 | prefork | gevent | +|------|---------|--------| +| **방식** | 멀티프로세싱 | 코루틴 (Greenlet) | +| **GIL 영향** | 회피 가능 ✅ | 영향 받음 | +| **적합한 작업** | CPU-bound ✅ | I/O-bound ✅ | +| **메모리 사용** | 프로세스당 격리 | 경량 | +| **동시성** | 프로세스 수 제한 | 수천 개 가능 | + +**적용 전략:** + +| Worker | Pool | 이유 | +|--------|------|------| +| OCR Worker | `prefork` | EasyOCR은 CPU 집약적, GIL 회피 필요 | +| Alert Worker | `gevent` | FCM API 호출은 I/O 대기, 높은 동시성 필요 | + +```bash +# OCR Worker 실행 +celery -A config worker --pool=prefork --concurrency=4 --queues=ocr_queue + +# Alert Worker 실행 +celery -A config worker --pool=gevent --concurrency=100 --queues=fcm_queue +``` + +### 6.4 Single DB vs Database per Service + +| 항목 | Single DB | Database per Service (선택) | +|------|-----------|----------------------------| +| **결합도** | 높음 (스키마 공유) | 낮음 ✅ | +| **독립 배포** | 어려움 | 가능 ✅ | +| **데이터 일관성** | 트랜잭션 보장 | 최종 일관성 | +| **조인 쿼리** | 가능 | 불가 (Application Join) | +| **스키마 변경** | 전체 영향 | 서비스별 독립 ✅ | +| **복잡도** | 단순 | 서비스 간 데이터 조회 복잡 | + +**선택 이유:** +- MSA 원칙 준수: 서비스 간 느슨한 결합 +- 독립 배포 및 확장 가능 +- 한 서비스의 DB 장애가 다른 서비스에 영향 최소화 + +--- + +## 7. 프로젝트 구조 (분리 배포용) + +### 7.1 Monorepo 구조 + +각 서비스는 **동일한 코드베이스**를 공유하되, 실행 시 역할에 따라 다른 컴포넌트만 활성화합니다. 
+ +``` +backend/ +├── docker/ +│ ├── Dockerfile.main # Main Service (Django) +│ ├── Dockerfile.ocr # OCR Service (Celery) +│ ├── Dockerfile.alert # Alert Service (Celery) +│ ├── docker-compose.yml # 로컬 개발용 +│ ├── mysql/ +│ │ └── init.sql # Multi-DB 초기화 스크립트 +│ └── rabbitmq/ +│ └── enabled_plugins # MQTT 플러그인 활성화 +│ +├── config/ +│ ├── __init__.py +│ ├── settings/ +│ │ ├── __init__.py +│ │ ├── base.py # 공통 설정 +│ │ ├── dev.py # 개발 환경 (Multi-DB) +│ │ └── prod.py # 운영 환경 +│ ├── db_router.py # MSA Database Router +│ ├── celery.py # Celery 설정 +│ ├── urls.py +│ └── wsgi.py +│ +├── apps/ # Django Apps (서비스별 독립 DB) +│ ├── __init__.py +│ ├── vehicles/ # → vehicles_db +│ │ ├── __init__.py +│ │ ├── models.py +│ │ ├── serializers.py +│ │ ├── views.py +│ │ ├── urls.py +│ │ └── admin.py +│ ├── detections/ # → detections_db +│ │ ├── __init__.py +│ │ ├── models.py +│ │ ├── serializers.py +│ │ ├── views.py +│ │ ├── urls.py +│ │ └── admin.py +│ └── notifications/ # → notifications_db +│ ├── __init__.py +│ ├── models.py +│ ├── serializers.py +│ ├── views.py +│ ├── urls.py +│ └── admin.py +│ +├── tasks/ # Celery Tasks +│ ├── __init__.py +│ ├── ocr_tasks.py # OCR Service 전용 +│ └── notification_tasks.py # Alert Service 전용 +│ +├── core/ # 공통 유틸리티 +│ ├── __init__.py +│ ├── mqtt/ +│ │ ├── __init__.py +│ │ └── subscriber.py # Main Service 전용 +│ ├── gcs/ +│ │ ├── __init__.py +│ │ └── client.py # GCS 클라이언트 +│ └── firebase/ +│ ├── __init__.py +│ └── fcm.py # FCM 클라이언트 +│ +├── scripts/ +│ ├── start_main.sh # Main Service 시작 +│ ├── start_ocr_worker.sh # OCR Worker 시작 +│ └── start_alert_worker.sh # Alert Worker 시작 +│ +├── tests/ # 테스트 코드 +│ ├── __init__.py +│ ├── conftest.py # Pytest 설정 +│ ├── unit/ +│ │ ├── test_models.py +│ │ ├── test_serializers.py +│ │ └── test_tasks.py +│ └── integration/ +│ ├── test_api_endpoints.py +│ └── test_event_flow.py +│ +├── credentials/ # 인증 정보 (Git 제외) +│ └── .gitkeep +│ +├── manage.py +├── pytest.ini +├── requirements/ +│ ├── base.txt # 공통 의존성 +│ ├── main.txt # 
Main Service 의존성 +│ ├── ocr.txt # OCR Service 의존성 +│ ├── alert.txt # Alert Service 의존성 +│ └── test.txt # 테스트 의존성 +│ +└── backend.env.example +``` + +### 7.2 서비스별 의존성 + +**requirements/base.txt** (공통) +```txt +Django==5.1.7 +djangorestframework==3.15.2 +django-filter==24.3 +django-cors-headers==4.7.0 +celery==5.5.2 +django-celery-results==2.5.1 +PyMySQL==1.1.1 +python-dotenv==1.0.1 +``` + +**requirements/main.txt** (Main Service) +```txt +-r base.txt +gunicorn==23.0.0 +paho-mqtt==2.0.0 +drf-yasg==1.21.10 +flower==2.0.0 +``` + +**requirements/ocr.txt** (OCR Service) +```txt +-r base.txt +easyocr==1.7.2 +opencv-python-headless==4.10.0.84 +pillow==11.2.1 +google-cloud-storage==2.18.2 +``` + +**requirements/alert.txt** (Alert Service) +```txt +-r base.txt +firebase-admin==6.8.0 +gevent==24.2.1 +``` + +### 7.3 서비스별 Dockerfile + +**docker/Dockerfile.main** +```dockerfile +FROM python:3.12-slim + +WORKDIR /app + +# 시스템 의존성 +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* + +# 의존성 설치 +COPY requirements/base.txt requirements/main.txt ./requirements/ +RUN pip install --no-cache-dir -r requirements/main.txt + +# 앱 복사 +COPY . . + +# 스크립트 권한 +RUN chmod +x scripts/*.sh + +EXPOSE 8000 + +CMD ["sh", "scripts/start_main.sh"] +``` + +**docker/Dockerfile.ocr** +```dockerfile +FROM python:3.12-slim + +WORKDIR /app + +# 시스템 의존성 (OpenCV) +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + pkg-config \ + libgl1 \ + libglib2.0-0 \ + && rm -rf /var/lib/apt/lists/* + +# 의존성 설치 +COPY requirements/base.txt requirements/ocr.txt ./requirements/ +RUN pip install --no-cache-dir -r requirements/ocr.txt + +# 앱 복사 +COPY . . 
+ +# 스크립트 권한 +RUN chmod +x scripts/*.sh + +CMD ["sh", "scripts/start_ocr_worker.sh"] +``` + +**docker/Dockerfile.alert** +```dockerfile +FROM python:3.12-slim + +WORKDIR /app + +# 시스템 의존성 +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* + +# 의존성 설치 +COPY requirements/base.txt requirements/alert.txt ./requirements/ +RUN pip install --no-cache-dir -r requirements/alert.txt + +# 앱 복사 +COPY . . + +# 스크립트 권한 +RUN chmod +x scripts/*.sh + +CMD ["sh", "scripts/start_alert_worker.sh"] +``` + +### 7.4 서비스 시작 스크립트 + +**scripts/start_main.sh** +```bash +#!/bin/bash +set -e + +echo "Starting Main Service (Django)..." + +# Django 마이그레이션 (모든 DB) +echo "Running migrations for all databases..." +python manage.py migrate --noinput --database=default +python manage.py migrate --noinput --database=vehicles_db +python manage.py migrate --noinput --database=detections_db +python manage.py migrate --noinput --database=notifications_db + +# Static 파일 수집 (프로덕션) +if [ "$DJANGO_SETTINGS_MODULE" = "config.settings.prod" ]; then + echo "Collecting static files..." + python manage.py collectstatic --noinput +fi + +# MQTT Subscriber 백그라운드 실행 +echo "Starting MQTT Subscriber..." +python -c " +import django +django.setup() +from core.mqtt.subscriber import start_mqtt_subscriber +start_mqtt_subscriber() +" & + +# Gunicorn 시작 +echo "Starting Gunicorn..." +gunicorn config.wsgi:application \ + --bind 0.0.0.0:8000 \ + --workers ${GUNICORN_WORKERS:-4} \ + --threads ${GUNICORN_THREADS:-2} \ + --access-logfile - \ + --error-logfile - +``` + +**scripts/start_ocr_worker.sh** +```bash +#!/bin/bash +set -e + +echo "Starting OCR Worker (Celery)..." 
+ +# Celery Worker 시작 (prefork pool - CPU 집약적) +celery -A config worker \ + --pool=prefork \ + --concurrency=${OCR_CONCURRENCY:-4} \ + --queues=ocr_queue \ + --hostname=ocr@%h \ + --loglevel=${LOG_LEVEL:-info} +``` + +**scripts/start_alert_worker.sh** +```bash +#!/bin/bash +set -e + +echo "Starting Alert Worker (Celery)..." + +# Celery Worker 시작 (gevent pool - I/O 집약적) +celery -A config worker \ + --pool=gevent \ + --concurrency=${ALERT_CONCURRENCY:-100} \ + --queues=fcm_queue \ + --hostname=alert@%h \ + --loglevel=${LOG_LEVEL:-info} +``` + +--- + +## 8. Celery 설정 + +### 8.1 config/celery.py + +```python +import os +from celery import Celery +from kombu import Exchange, Queue + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.dev') + +app = Celery('speedcam') +app.config_from_object('django.conf:settings', namespace='CELERY') + +# Exchange 정의 +ocr_exchange = Exchange('ocr_exchange', type='direct', durable=True) +fcm_exchange = Exchange('fcm_exchange', type='direct', durable=True) +dlq_exchange = Exchange('dlq_exchange', type='fanout', durable=True) + +# Celery 설정 +app.conf.update( + # Broker + broker_connection_retry_on_startup=True, + + # Serialization + task_serializer='json', + accept_content=['json'], + result_serializer='json', + + # Timezone + timezone='Asia/Seoul', + enable_utc=True, + + # Stability + task_acks_late=True, + task_reject_on_worker_lost=True, + + # Timeout + task_time_limit=300, + task_soft_time_limit=240, + + # Prefetch + worker_prefetch_multiplier=1, +) + +# Queue 정의 +app.conf.task_queues = ( + Queue( + 'ocr_queue', + exchange=ocr_exchange, + routing_key='ocr', + queue_arguments={ + 'x-dead-letter-exchange': 'dlq_exchange', + 'x-message-ttl': 3600000, + 'x-max-priority': 10, + } + ), + Queue( + 'fcm_queue', + exchange=fcm_exchange, + routing_key='fcm', + queue_arguments={ + 'x-dead-letter-exchange': 'dlq_exchange', + 'x-message-ttl': 3600000, + } + ), + Queue( + 'dlq_queue', + exchange=dlq_exchange, + routing_key='', + ), +) 
+ +# Task 라우팅 +app.conf.task_routes = { + 'tasks.ocr_tasks.process_ocr': { + 'queue': 'ocr_queue', + 'exchange': 'ocr_exchange', + 'routing_key': 'ocr', + }, + 'tasks.notification_tasks.send_notification': { + 'queue': 'fcm_queue', + 'exchange': 'fcm_exchange', + 'routing_key': 'fcm', + }, +} + +# Task 자동 발견 +app.autodiscover_tasks(['tasks']) +``` + +--- + +## 9. 서비스별 상세 설계 + +### 9.1 Main Service (Django) + +#### MQTT Subscriber + +```python +# core/mqtt/subscriber.py +import json +import os +import logging +import threading +import paho.mqtt.client as mqtt +from django.utils import timezone +from apps.detections.models import Detection +from tasks.ocr_tasks import process_ocr + +logger = logging.getLogger(__name__) + +class MQTTSubscriber: + def __init__(self): + self.client = mqtt.Client( + callback_api_version=mqtt.CallbackAPIVersion.VERSION2, + protocol=mqtt.MQTTv5, + client_id=f"django-main-{os.getpid()}" + ) + self.client.on_connect = self.on_connect + self.client.on_message = self.on_message + self.client.on_disconnect = self.on_disconnect + + # 인증 설정 + username = os.getenv('MQTT_USER', 'sa') + password = os.getenv('MQTT_PASS', '1234') + self.client.username_pw_set(username, password) + + def on_connect(self, client, userdata, flags, rc, properties=None): + logger.info(f"Connected to MQTT broker with code {rc}") + client.subscribe("detections/new", qos=1) + + def on_message(self, client, userdata, msg): + try: + payload = json.loads(msg.payload.decode()) + logger.info(f"Received MQTT message: {payload}") + + # 1. 
pending 레코드 즉시 생성 (detections_db) + detection = Detection.objects.using('detections_db').create( + camera_id=payload.get('camera_id'), + location=payload.get('location'), + detected_speed=payload['detected_speed'], + speed_limit=payload.get('speed_limit', 60.0), + detected_at=payload.get('detected_at', timezone.now()), + image_gcs_uri=payload['image_gcs_uri'], + status='pending' + ) + + logger.info(f"Created detection {detection.id} with pending status") + + # 2. OCR Task 발행 (AMQP) + process_ocr.apply_async( + args=[detection.id], + kwargs={'gcs_uri': payload['image_gcs_uri']}, + queue='ocr_queue', + priority=5 + ) + + logger.info(f"Dispatched OCR task for detection {detection.id}") + + except Exception as e: + logger.error(f"Error processing MQTT message: {e}") + + def on_disconnect(self, client, userdata, rc, properties=None): + logger.warning(f"Disconnected from MQTT broker with code {rc}") + + def start(self): + host = os.getenv('RABBITMQ_HOST', 'rabbitmq') + port = int(os.getenv('MQTT_PORT', 1883)) + logger.info(f"Connecting to MQTT broker at {host}:{port}") + self.client.connect(host, port, 60) + self.client.loop_forever() + +def start_mqtt_subscriber(): + """백그라운드 스레드에서 MQTT Subscriber 시작""" + subscriber = MQTTSubscriber() + thread = threading.Thread(target=subscriber.start, daemon=True) + thread.start() + logger.info("MQTT Subscriber started in background thread") +``` + +### 9.2 OCR Service (Celery Worker) + +```python +# tasks/ocr_tasks.py +import os +import re +import logging +from celery import shared_task +from django.db import transaction +from django.utils import timezone + +logger = logging.getLogger(__name__) + +# Mock 모드 설정 +OCR_MOCK = os.getenv('OCR_MOCK', 'false').lower() == 'true' + +def mock_ocr_result(): + """Mock OCR 결과 생성""" + import random + plates = ["12가3456", "34나5678", "56다7890", "78라9012", "90마1234"] + return random.choice(plates), round(random.uniform(0.85, 0.99), 2) + +@shared_task( + bind=True, + max_retries=3, + 
default_retry_delay=60, + acks_late=True +) +def process_ocr(self, detection_id: int, gcs_uri: str): + from apps.detections.models import Detection + from apps.vehicles.models import Vehicle + from tasks.notification_tasks import send_notification + + logger.info(f"Processing OCR for detection {detection_id}") + + try: + # 1. 상태를 processing으로 업데이트 (detections_db) + Detection.objects.using('detections_db').filter(id=detection_id).update( + status='processing', + updated_at=timezone.now() + ) + + if OCR_MOCK: + # Mock 모드 + plate_number, confidence = mock_ocr_result() + logger.info(f"Mock OCR result: {plate_number} ({confidence})") + else: + # 실제 OCR 처리 + from google.cloud import storage + import easyocr + + # 2. GCS 이미지 다운로드 + storage_client = storage.Client() + bucket_name = gcs_uri.split('/')[2] + blob_path = '/'.join(gcs_uri.split('/')[3:]) + + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_path) + image_bytes = blob.download_as_bytes() + + # 3. OCR 실행 + reader = easyocr.Reader(['ko', 'en'], gpu=False) + results = reader.readtext(image_bytes) + + # 4. 번호판 파싱 + plate_number, confidence = parse_plate(results) + + # 5. 직접 MySQL 업데이트 (detections_db) + with transaction.atomic(using='detections_db'): + detection = Detection.objects.using('detections_db').select_for_update().get(id=detection_id) + detection.ocr_result = plate_number + detection.ocr_confidence = confidence + detection.status = 'completed' + detection.processed_at = timezone.now() + detection.save(update_fields=[ + 'ocr_result', 'ocr_confidence', 'status', + 'processed_at', 'updated_at' + ]) + + # 6. Vehicle 매칭 (vehicles_db) + if plate_number: + vehicle = Vehicle.objects.using('vehicles_db').filter( + plate_number=plate_number + ).first() + + if vehicle: + detection.vehicle_id = vehicle.id + detection.save(update_fields=['vehicle_id', 'updated_at']) + + # 7. 
FCM 토큰이 있으면 알림 Task 발행 + if vehicle.fcm_token: + send_notification.apply_async( + args=[detection_id], + queue='fcm_queue' + ) + + logger.info(f"OCR completed for detection {detection_id}: {plate_number}") + return { + 'detection_id': detection_id, + 'plate': plate_number, + 'confidence': confidence + } + + except Exception as exc: + # 실패 시 에러 기록 (detections_db) + Detection.objects.using('detections_db').filter(id=detection_id).update( + status='failed', + error_message=str(exc), + updated_at=timezone.now() + ) + logger.error(f"OCR failed for detection {detection_id}: {exc}") + raise self.retry(exc=exc) + + +def parse_plate(results): + """번호판 파싱""" + pattern = r'^\d{2,3}[가-힣]\d{4}$' + + for bbox, text, conf in results: + normalized = text.replace(' ', '') + if re.match(pattern, normalized): + return normalized, conf + + return None, 0.0 +``` + +### 9.3 Alert Service (Celery Worker) + +```python +# tasks/notification_tasks.py +import os +import logging +from celery import shared_task +from django.utils import timezone + +logger = logging.getLogger(__name__) + +# Mock 모드 설정 +FCM_MOCK = os.getenv('FCM_MOCK', 'false').lower() == 'true' + +@shared_task( + bind=True, + max_retries=3, + autoretry_for=(Exception,), + retry_backoff=True, + retry_backoff_max=600, + acks_late=True +) +def send_notification(self, detection_id: int): + from apps.detections.models import Detection + from apps.vehicles.models import Vehicle + from apps.notifications.models import Notification + + logger.info(f"Sending notification for detection {detection_id}") + + try: + # 1. Detection 조회 (detections_db) + try: + detection = Detection.objects.using('detections_db').get( + id=detection_id, + status='completed' + ) + except Detection.DoesNotExist: + logger.error(f"Detection {detection_id} not found") + return {'status': 'error', 'reason': 'Detection not found'} + + # 2. 
Vehicle 조회 (vehicles_db) + vehicle = None + if detection.vehicle_id: + try: + vehicle = Vehicle.objects.using('vehicles_db').get(id=detection.vehicle_id) + except Vehicle.DoesNotExist: + logger.warning(f"Vehicle {detection.vehicle_id} not found") + + if not vehicle or not vehicle.fcm_token: + logger.warning(f"No FCM token for detection {detection_id}") + return {'status': 'skipped', 'reason': 'No FCM token'} + + # 3. FCM 메시지 생성 + title = f"⚠️ 과속 위반 감지: {detection.ocr_result}" + body = f"📍 위치: {detection.location or 'Unknown'}\n🚗 속도: {detection.detected_speed}km/h (제한: {detection.speed_limit}km/h)" + + if FCM_MOCK: + # Mock 모드 + response = f"mock-message-id-{detection_id}" + logger.info(f"Mock FCM sent: {title}") + else: + # 실제 FCM 전송 + import firebase_admin + from firebase_admin import messaging + + if not firebase_admin._apps: + cred_path = os.getenv('FIREBASE_CREDENTIALS') + if cred_path: + cred = firebase_admin.credentials.Certificate(cred_path) + firebase_admin.initialize_app(cred) + + message = messaging.Message( + notification=messaging.Notification(title=title, body=body), + data={ + 'detection_id': str(detection_id), + 'plate': detection.ocr_result or '', + 'speed': str(detection.detected_speed), + }, + token=vehicle.fcm_token + ) + + response = messaging.send(message) + + # 4. 
성공 이력 저장 (notifications_db) + Notification.objects.using('notifications_db').create( + detection_id=detection_id, + fcm_token=vehicle.fcm_token, + title=title, + body=body, + status='sent', + sent_at=timezone.now() + ) + + logger.info(f"Notification sent for detection {detection_id}: {response}") + return {'status': 'sent', 'fcm_response': response} + + except Exception as exc: + # FCM 실패 시 이력 저장 후 재시도 (notifications_db) + try: + Notification.objects.using('notifications_db').create( + detection_id=detection_id, + status='failed', + retry_count=self.request.retries, + error_message=str(exc) + ) + except Exception: + pass + + logger.error(f"Notification failed for detection {detection_id}: {exc}") + raise +``` + +--- + +## 10. Docker Compose (로컬 개발) + +```yaml +services: + mysql: + image: mysql:8.0 + container_name: speedcam-mysql + environment: + MYSQL_ROOT_PASSWORD: root + MYSQL_DATABASE: speedcam + MYSQL_USER: sa + MYSQL_PASSWORD: "1234" + ports: + - "3306:3306" + volumes: + - mysql_data:/var/lib/mysql + - ./mysql/init.sql:/docker-entrypoint-initdb.d/init.sql:ro + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "sa", "-p1234"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - speedcam-network + + rabbitmq: + image: rabbitmq:3.13-management + container_name: speedcam-rabbitmq + environment: + RABBITMQ_DEFAULT_USER: sa + RABBITMQ_DEFAULT_PASS: "1234" + RABBITMQ_MQTT_LISTENERS_TCP_DEFAULT: 1883 + RABBITMQ_MQTT_ALLOW_ANONYMOUS: "false" + RABBITMQ_MQTT_DEFAULT_USER: sa + RABBITMQ_MQTT_DEFAULT_PASS: "1234" + RABBITMQ_MQTT_VHOST: / + RABBITMQ_MQTT_EXCHANGE: amq.topic + ports: + - "5672:5672" # AMQP + - "1883:1883" # MQTT + - "15672:15672" # Management UI + volumes: + - rabbitmq_data:/var/lib/rabbitmq + - ./rabbitmq/enabled_plugins:/etc/rabbitmq/enabled_plugins + healthcheck: + test: ["CMD", "rabbitmq-diagnostics", "check_running"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - speedcam-network + + main: + build: +
context: .. + dockerfile: docker/Dockerfile.main + container_name: speedcam-main + environment: + - DJANGO_SETTINGS_MODULE=config.settings.dev + - DB_HOST=mysql + - DB_PORT=3306 + - DB_NAME=speedcam + - DB_NAME_VEHICLES=speedcam_vehicles + - DB_NAME_DETECTIONS=speedcam_detections + - DB_NAME_NOTIFICATIONS=speedcam_notifications + - DB_USER=sa + - DB_PASSWORD=1234 + - CELERY_BROKER_URL=amqp://sa:1234@rabbitmq:5672// + - RABBITMQ_HOST=rabbitmq + - MQTT_PORT=1883 + - MQTT_USER=sa + - MQTT_PASS=1234 + - OCR_MOCK=true + - FCM_MOCK=true + ports: + - "8000:8000" + volumes: + - ../credentials:/app/credentials:ro + depends_on: + mysql: + condition: service_healthy + rabbitmq: + condition: service_healthy + networks: + - speedcam-network + + ocr-worker: + build: + context: .. + dockerfile: docker/Dockerfile.ocr + container_name: speedcam-ocr + environment: + - DJANGO_SETTINGS_MODULE=config.settings.dev + - DB_HOST=mysql + - DB_PORT=3306 + - DB_NAME=speedcam + - DB_NAME_VEHICLES=speedcam_vehicles + - DB_NAME_DETECTIONS=speedcam_detections + - DB_NAME_NOTIFICATIONS=speedcam_notifications + - DB_USER=sa + - DB_PASSWORD=1234 + - CELERY_BROKER_URL=amqp://sa:1234@rabbitmq:5672// + - OCR_CONCURRENCY=2 + - OCR_MOCK=true + volumes: + - ../credentials:/app/credentials:ro + depends_on: + - main + - rabbitmq + networks: + - speedcam-network + + alert-worker: + build: + context: .. + dockerfile: docker/Dockerfile.alert + container_name: speedcam-alert + environment: + - DJANGO_SETTINGS_MODULE=config.settings.dev + - DB_HOST=mysql + - DB_PORT=3306 + - DB_NAME=speedcam + - DB_NAME_VEHICLES=speedcam_vehicles + - DB_NAME_DETECTIONS=speedcam_detections + - DB_NAME_NOTIFICATIONS=speedcam_notifications + - DB_USER=sa + - DB_PASSWORD=1234 + - CELERY_BROKER_URL=amqp://sa:1234@rabbitmq:5672// + - ALERT_CONCURRENCY=50 + - FCM_MOCK=true + volumes: + - ../credentials:/app/credentials:ro + depends_on: + - main + - rabbitmq + networks: + - speedcam-network + + flower: + build: + context: .. 
+ dockerfile: docker/Dockerfile.main + container_name: speedcam-flower + command: celery -A config flower --port=5555 + environment: + - DJANGO_SETTINGS_MODULE=config.settings.dev + - CELERY_BROKER_URL=amqp://sa:1234@rabbitmq:5672// + ports: + - "5555:5555" + depends_on: + - rabbitmq + networks: + - speedcam-network + +volumes: + mysql_data: + rabbitmq_data: + +networks: + speedcam-network: + driver: bridge +``` + +### MySQL 초기화 스크립트 + +**docker/mysql/init.sql** +```sql +-- MSA용 데이터베이스 생성 +CREATE DATABASE IF NOT EXISTS speedcam_vehicles; +CREATE DATABASE IF NOT EXISTS speedcam_detections; +CREATE DATABASE IF NOT EXISTS speedcam_notifications; + +-- 사용자 권한 부여 +GRANT ALL PRIVILEGES ON speedcam_vehicles.* TO 'sa'@'%'; +GRANT ALL PRIVILEGES ON speedcam_detections.* TO 'sa'@'%'; +GRANT ALL PRIVILEGES ON speedcam_notifications.* TO 'sa'@'%'; + +FLUSH PRIVILEGES; +``` + +### RabbitMQ 설정 파일 + +**rabbitmq/enabled_plugins** +``` +[rabbitmq_management, rabbitmq_mqtt]. +``` + +--- + +## 11. 환경 변수 + +```env +# backend.env.example + +# =========================================== +# Django 설정 +# =========================================== +DJANGO_SECRET_KEY=your-secret-key-here +DJANGO_SETTINGS_MODULE=config.settings.dev +DEBUG=True + +# =========================================== +# 데이터베이스 설정 (MySQL - MSA Multi-DB) +# =========================================== +DB_HOST=mysql +DB_PORT=3306 +DB_USER=sa +DB_PASSWORD=1234 + +# 서비스별 데이터베이스 +DB_NAME=speedcam +DB_NAME_VEHICLES=speedcam_vehicles +DB_NAME_DETECTIONS=speedcam_detections +DB_NAME_NOTIFICATIONS=speedcam_notifications + +# =========================================== +# RabbitMQ / Celery 설정 +# =========================================== +CELERY_BROKER_URL=amqp://sa:1234@rabbitmq:5672// +RABBITMQ_HOST=rabbitmq + +# =========================================== +# MQTT 설정 (RabbitMQ MQTT Plugin) +# =========================================== +MQTT_PORT=1883 +MQTT_USER=sa +MQTT_PASS=1234 + +# 
=========================================== +# GCS (Google Cloud Storage) 설정 +# =========================================== +GCS_BUCKET_NAME=your-gcs-bucket-name +GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/gcp-cloud-storage.json + +# =========================================== +# Firebase 설정 (FCM Push Notification) +# =========================================== +FIREBASE_CREDENTIALS=/app/credentials/firebase-service-account.json + +# =========================================== +# Celery Worker 설정 +# =========================================== +OCR_CONCURRENCY=2 +OCR_MOCK=true + +ALERT_CONCURRENCY=50 +FCM_MOCK=true + +# =========================================== +# Gunicorn 설정 +# =========================================== +GUNICORN_WORKERS=4 +GUNICORN_THREADS=2 + +# =========================================== +# 로깅 설정 +# =========================================== +LOG_LEVEL=info + +# =========================================== +# CORS 설정 +# =========================================== +CORS_ALLOWED_ORIGINS=http://localhost:5173,http://localhost:3000 +``` + +--- + +## 12. 핵심 설계 원칙 + +### 12.1 Choreography Pattern +- 각 서비스가 **자기 할 일만 하고 다음 이벤트를 발행** +- OCR Worker가 직접 MySQL 업데이트 (Main Service를 거치지 않음) +- 서비스 간 느슨한 결합 → 독립적 확장/배포 가능 + +### 12.2 Database per Service +- 각 서비스는 **자신만의 데이터베이스** 사용 +- ForeignKey 대신 **ID Reference**로 서비스 간 데이터 참조 +- 한 서비스의 DB 장애가 다른 서비스에 영향 최소화 + +### 12.3 데이터 손실 방지 +- Main Service가 MQTT 메시지 수신 시 **즉시 pending 레코드 생성** +- OCR 실패해도 "무언가 감지되었다"는 사실 추적 가능 +- DLQ로 실패한 Task 별도 관리 + +### 12.4 프로토콜 분리 +- **MQTT**: IoT 디바이스(Raspberry Pi) 통신용 경량 프로토콜 +- **AMQP**: 백엔드 서비스 간 안정적인 메시지 전달 + +### 12.5 GIL 병목 회피 +- **OCR Worker**: `prefork` pool (multiprocessing) - CPU 집약적 +- **Alert Worker**: `gevent` pool (I/O 멀티플렉싱) - I/O 집약적 + +### 12.6 독립 배포 +- 각 서비스(Main, OCR, Alert)가 별도 인스턴스에 배포 +- 공유 코드베이스 + 서비스별 Dockerfile/의존성 +- RabbitMQ를 통한 서비스 간 통신 + +--- + +## 13. 
변경 이력 + +| 버전 | 날짜 | 변경 내용 | +|------|------|----------| +| 1.0 | 2024-01 | 초기 PRD 작성 | +| 2.0 | 2026-01 | MSA Database 분리 아키텍처 적용 | +| | | - Database per Service 패턴 도입 | +| | | - ForeignKey → ID Reference 변경 | +| | | - Database Router 구현 | +| | | - Python 3.12로 버전 업데이트 | +| | | - DataDog 관련 설정 제거 (Optional) | +| | | - Mock 모드 추가 (OCR_MOCK, FCM_MOCK) | diff --git a/manage.py b/manage.py index a8034669..ff6440c6 100755 --- a/manage.py +++ b/manage.py @@ -6,7 +6,7 @@ def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings.dev') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.dev") try: from django.core.management import execute_from_command_line except ImportError as exc: diff --git a/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/1377959a/original_from_pil.png b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/1377959a/original_from_pil.png new file mode 100644 index 00000000..b5d14efb Binary files /dev/null and b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/1377959a/original_from_pil.png differ diff --git a/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/1377959a/roi_crop.png b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/1377959a/roi_crop.png new file mode 100644 index 00000000..0a5ff6a7 Binary files /dev/null and b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/1377959a/roi_crop.png differ diff --git a/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/71610ac4/original_from_pil.png b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/71610ac4/original_from_pil.png new file mode 100644 index 00000000..b5d14efb Binary files /dev/null and b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/71610ac4/original_from_pil.png differ diff --git 
a/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/71610ac4/roi_crop.png b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/71610ac4/roi_crop.png new file mode 100644 index 00000000..0a5ff6a7 Binary files /dev/null and b/media/ocr_debug_steps/1d1baa2e-434f-48d6-b167-b01de292f7a5_image13/71610ac4/roi_crop.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/651e419d/original_from_pil.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/651e419d/original_from_pil.png new file mode 100644 index 00000000..09ace294 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/651e419d/original_from_pil.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/651e419d/roi_crop.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/651e419d/roi_crop.png new file mode 100644 index 00000000..643585d2 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/651e419d/roi_crop.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/7bd5c442/original_from_pil.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/7bd5c442/original_from_pil.png new file mode 100644 index 00000000..09ace294 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/7bd5c442/original_from_pil.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/7bd5c442/roi_crop.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/7bd5c442/roi_crop.png new file mode 100644 index 00000000..643585d2 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/7bd5c442/roi_crop.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/877f8065/original_from_pil.png 
b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/877f8065/original_from_pil.png new file mode 100644 index 00000000..09ace294 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/877f8065/original_from_pil.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/877f8065/roi_crop.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/877f8065/roi_crop.png new file mode 100644 index 00000000..643585d2 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/877f8065/roi_crop.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/90f6c66d/original_from_pil.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/90f6c66d/original_from_pil.png new file mode 100644 index 00000000..09ace294 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/90f6c66d/original_from_pil.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/a746738e/original_from_pil.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/a746738e/original_from_pil.png new file mode 100644 index 00000000..09ace294 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/a746738e/original_from_pil.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/a746738e/roi_crop.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/a746738e/roi_crop.png new file mode 100644 index 00000000..643585d2 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/a746738e/roi_crop.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/e095397c/original_from_pil.png 
b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/e095397c/original_from_pil.png new file mode 100644 index 00000000..09ace294 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/e095397c/original_from_pil.png differ diff --git a/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/e095397c/roi_crop.png b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/e095397c/roi_crop.png new file mode 100644 index 00000000..643585d2 Binary files /dev/null and b/media/ocr_debug_steps/96de5a07-b547-4c61-ae8c-fa40ca7410f8_image14/e095397c/roi_crop.png differ diff --git a/mysql.env.example b/mysql.env.example new file mode 100644 index 00000000..a6ec216f --- /dev/null +++ b/mysql.env.example @@ -0,0 +1,11 @@ +# =========================================== +# MySQL Environment Variables +# =========================================== +# 사용법: 이 파일을 mysql.env로 복사하여 사용 +# cp mysql.env.example mysql.env + +MYSQL_ROOT_PASSWORD=root +MYSQL_DATABASE=speedcam +MYSQL_USER=sa +MYSQL_PASSWORD=1234 + diff --git a/post/__init__.py b/post/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/post/admin.py b/post/admin.py deleted file mode 100644 index 8c38f3f3..00000000 --- a/post/admin.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.contrib import admin - -# Register your models here. diff --git a/post/apps.py b/post/apps.py deleted file mode 100644 index a7c8c223..00000000 --- a/post/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class PostConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "post" diff --git a/post/migrations/__init__.py b/post/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/post/models.py b/post/models.py deleted file mode 100644 index 71a83623..00000000 --- a/post/models.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.db import models - -# Create your models here. 
diff --git a/post/tests.py b/post/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/post/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/post/urls.py b/post/urls.py deleted file mode 100644 index 9ad03348..00000000 --- a/post/urls.py +++ /dev/null @@ -1,7 +0,0 @@ -from backend.urls import path - -from . import views - -urlpatterns = [ - path("", views.index), -] \ No newline at end of file diff --git a/post/views.py b/post/views.py deleted file mode 100644 index dc3b5512..00000000 --- a/post/views.py +++ /dev/null @@ -1,7 +0,0 @@ -from django.shortcuts import render -from django.http import HttpResponse - - -def index(request): - return HttpResponse("설정이 완료되었습니다.") -# Create your views here. diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..a0bd98ea --- /dev/null +++ b/pytest.ini @@ -0,0 +1,11 @@ +[pytest] +DJANGO_SETTINGS_MODULE = config.settings.dev +python_files = test_*.py +python_classes = Test* +python_functions = test_* +addopts = -v --tb=short +testpaths = tests +markers = + django_db: mark test to use Django database + slow: mark test as slow running + diff --git a/rabbitmq.env.example b/rabbitmq.env.example new file mode 100644 index 00000000..8aa87f6c --- /dev/null +++ b/rabbitmq.env.example @@ -0,0 +1,9 @@ +# =========================================== +# RabbitMQ Environment Variables +# =========================================== +# 사용법: 이 파일을 rabbitmq.env로 복사하여 사용 +# cp rabbitmq.env.example rabbitmq.env + +RABBITMQ_DEFAULT_USER=sa +RABBITMQ_DEFAULT_PASS=1234 + diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3eed2108..00000000 --- a/requirements.txt +++ /dev/null @@ -1,18 +0,0 @@ -asgiref==3.8.1 -cffi==1.17.1 -click==8.1.8 -cryptography==44.0.2 -Django==5.1.7 -djangorestframework==3.15.2 -drf-yasg==1.21.10 -inflection==0.5.1 -mysqlclient==2.2.7 -packaging==24.2 -pycparser==2.22 -PyMySQL==1.1.1 -python-dotenv==1.0.1 
-pytz==2025.1 -PyYAML==6.0.2 -sqlparse==0.5.3 -typing_extensions==4.12.2 -uritemplate==4.1.1 diff --git a/requirements/alert.txt b/requirements/alert.txt new file mode 100644 index 00000000..bb8e51a9 --- /dev/null +++ b/requirements/alert.txt @@ -0,0 +1,9 @@ +# Alert Service Requirements +-r base.txt + +# Firebase (FCM) +firebase-admin==6.8.0 + +# Gevent (I/O Pool) +gevent==24.2.1 + diff --git a/requirements/base.txt b/requirements/base.txt new file mode 100644 index 00000000..0a0c47b5 --- /dev/null +++ b/requirements/base.txt @@ -0,0 +1,37 @@ +# Base Requirements (공통) + +# Core Django +Django==5.1.7 +djangorestframework==3.15.2 +django-cors-headers==4.7.0 +django-filter==24.3 + +# Database +PyMySQL==1.1.1 +cryptography>=42.0.0 + +# Celery (Async Task Queue) +celery==5.5.2 +django_celery_results==2.6.0 +kombu==5.5.3 + +# Utilities +python-dotenv==1.0.1 +python-dateutil==2.9.0.post0 +pytz==2025.1 + +# API Documentation +drf-yasg==1.21.10 +PyYAML==6.0.2 + +# Observability (OpenTelemetry) +opentelemetry-distro +opentelemetry-exporter-otlp +opentelemetry-instrumentation-django +opentelemetry-instrumentation-celery +opentelemetry-instrumentation-pymysql +opentelemetry-instrumentation-requests +opentelemetry-instrumentation-logging + +# Prometheus Metrics +django-prometheus diff --git a/requirements/dev.txt b/requirements/dev.txt new file mode 100644 index 00000000..cdb3efd6 --- /dev/null +++ b/requirements/dev.txt @@ -0,0 +1,14 @@ +# Development Requirements (All-in-One) +-r main.txt +-r ocr.txt +-r alert.txt + +# Testing +pytest==8.3.3 +pytest-django==4.9.0 + +# Code Quality +flake8==7.1.1 +black==24.10.0 +isort==5.13.2 + diff --git a/requirements/main.txt b/requirements/main.txt new file mode 100644 index 00000000..3c6012a1 --- /dev/null +++ b/requirements/main.txt @@ -0,0 +1,12 @@ +# Main Service Requirements +-r base.txt + +# WSGI Server +gunicorn==23.0.0 + +# MQTT Client +paho-mqtt==2.0.0 + +# Celery Monitoring +flower==2.0.1 + diff --git a/requirements/ocr.txt 
b/requirements/ocr.txt new file mode 100644 index 00000000..d163a00c --- /dev/null +++ b/requirements/ocr.txt @@ -0,0 +1,11 @@ +# OCR Service Requirements +-r base.txt + +# OCR & Image Processing +easyocr==1.7.2 +opencv-python-headless==4.10.0.84 +pillow==11.2.1 + +# Google Cloud Storage +google-cloud-storage==2.18.2 + diff --git a/requirements/test.txt b/requirements/test.txt new file mode 100644 index 00000000..58653bf7 --- /dev/null +++ b/requirements/test.txt @@ -0,0 +1,13 @@ +# Test Requirements +-r base.txt + +# Testing Framework +pytest==8.3.4 +pytest-django==4.9.0 +pytest-cov==6.0.0 +pytest-mock==3.14.0 + +# Factory for test data +factory-boy==3.3.1 +Faker==33.1.0 + diff --git a/scripts/start_alert_worker.sh b/scripts/start_alert_worker.sh new file mode 100644 index 00000000..24bda152 --- /dev/null +++ b/scripts/start_alert_worker.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -e + +echo "Starting Alert Worker (Celery)..." + +# Celery Worker 시작 (gevent pool - I/O 집약적) +opentelemetry-instrument \ + --service_name speedcam-alert \ + celery -A config worker \ + --pool=gevent \ + --concurrency=${ALERT_CONCURRENCY:-100} \ + --queues=fcm_queue \ + --hostname=alert@%h \ + --loglevel=${LOG_LEVEL:-info} diff --git a/scripts/start_main.sh b/scripts/start_main.sh new file mode 100644 index 00000000..187a61d6 --- /dev/null +++ b/scripts/start_main.sh @@ -0,0 +1,55 @@ +#!/bin/bash +set -e + +echo "Starting Main Service (Django)..." + +# ================================================== +# MSA Database Migration +# 각 서비스별 DB에 마이그레이션 실행 +# ================================================== + +echo "Running migrations for all databases..." + +# 1. Default DB (Django 기본 - auth, admin, sessions) +echo "[1/4] Migrating default database..." +python manage.py migrate --database=default --noinput + +# 2. Vehicles DB +echo "[2/4] Migrating vehicles database..." +python manage.py migrate --database=vehicles_db --noinput + +# 3. Detections DB +echo "[3/4] Migrating detections database..." 
+python manage.py migrate --database=detections_db --noinput + +# 4. Notifications DB +echo "[4/4] Migrating notifications database..." +python manage.py migrate --database=notifications_db --noinput + +echo "All migrations completed!" + +# Static 파일 수집 (프로덕션) +if [ "$DJANGO_SETTINGS_MODULE" = "config.settings.prod" ]; then + echo "Collecting static files..." + python manage.py collectstatic --noinput +fi + +# MQTT Subscriber 백그라운드 실행 +echo "Starting MQTT Subscriber..." +python -c " +import django +django.setup() +from core.mqtt.subscriber import start_mqtt_subscriber +start_mqtt_subscriber() +" & + +# Gunicorn 시작 +echo "Starting Gunicorn..." +opentelemetry-instrument \ + --service_name speedcam-api \ + gunicorn config.wsgi:application \ + --bind 0.0.0.0:8000 \ + --workers ${GUNICORN_WORKERS:-4} \ + --threads ${GUNICORN_THREADS:-2} \ + --access-logfile - \ + --error-logfile - diff --git a/scripts/start_ocr_worker.sh b/scripts/start_ocr_worker.sh new file mode 100644 index 00000000..40557a6c --- /dev/null +++ b/scripts/start_ocr_worker.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -e + +echo "Starting OCR Worker (Celery)..." 
+ +# Celery Worker 시작 (prefork pool - CPU 집약적) +opentelemetry-instrument \ + --service_name speedcam-ocr \ + celery -A config worker \ + --pool=prefork \ + --concurrency=${OCR_CONCURRENCY:-4} \ + --queues=ocr_queue \ + --hostname=ocr@%h \ + --loglevel=${LOG_LEVEL:-info} diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..c711cd32 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,10 @@ +[flake8] +max-line-length = 120 +exclude = + .venv, + migrations, + __pycache__, + .git +per-file-ignores = + config/settings/*.py: F403, F405 + tests/conftest.py: E402 diff --git a/tasks/__init__.py b/tasks/__init__.py new file mode 100644 index 00000000..c3d404a8 --- /dev/null +++ b/tasks/__init__.py @@ -0,0 +1,6 @@ +# Celery Tasks Package +from .dlq_tasks import process_dlq_message +from .notification_tasks import send_notification +from .ocr_tasks import process_ocr + +__all__ = ["process_ocr", "send_notification", "process_dlq_message"] diff --git a/tasks/dlq_tasks.py b/tasks/dlq_tasks.py new file mode 100644 index 00000000..41095161 --- /dev/null +++ b/tasks/dlq_tasks.py @@ -0,0 +1,42 @@ +"""DLQ (Dead Letter Queue) Consumer Task""" + +import logging + +from celery import shared_task +from django.utils import timezone + +logger = logging.getLogger(__name__) + + +@shared_task(bind=True, acks_late=True) +def process_dlq_message(self, *args, **kwargs): + """ + DLQ 메시지 처리 + + Dead Letter Queue로 라우팅된 실패 메시지를 로깅하고 + 관련 Detection 상태를 업데이트합니다. 
+ """ + headers = self.request.headers or {} + original_queue = headers.get("x-first-death-queue", "unknown") + death_reason = headers.get("x-first-death-reason", "unknown") + + logger.error( + f"DLQ message received: queue={original_queue}, " + f"reason={death_reason}, args={args}, kwargs={kwargs}" + ) + + # Detection 상태 업데이트 시도 + detection_id = args[0] if args else kwargs.get("detection_id") + if detection_id: + try: + from apps.detections.models import Detection + + Detection.objects.using("detections_db").filter(id=detection_id).update( + status="failed", + error_message=f"DLQ: {death_reason} from {original_queue}", + # QuerySet.update() bypasses auto_now, so set explicitly + updated_at=timezone.now(), + ) + logger.info(f"Detection {detection_id} marked as failed via DLQ") + except Exception as e: + logger.error(f"Failed to update detection {detection_id} from DLQ: {e}") diff --git a/tasks/notification_tasks.py b/tasks/notification_tasks.py new file mode 100644 index 00000000..770a3c89 --- /dev/null +++ b/tasks/notification_tasks.py @@ -0,0 +1,180 @@ +"""Notification Worker Tasks (I/O 집약적)""" + +import logging +import os + +from celery import shared_task +from django.utils import timezone + +logger = logging.getLogger(__name__) + +# Mock 모드 (테스트용) +FCM_MOCK = os.getenv("FCM_MOCK", "false").lower() == "true" + + +@shared_task( + bind=True, + max_retries=3, + autoretry_for=(Exception,), + retry_backoff=True, + retry_backoff_max=600, + acks_late=True, +) +def send_notification(self, detection_id: int): + """ + FCM 푸시 알림 전송 Task + - 대시보드 토픽 브로드캐스트 (모든 감지에 대해) + - 매칭된 차량 개별 푸시 (차량 있는 경우) + - MSA: 각 서비스별 DB에서 조회 + """ + from apps.detections.models import Detection + from apps.notifications.models import Notification + from apps.vehicles.models import Vehicle + + try: + # 1. Detection 조회 (detections_db) + detection = Detection.objects.using("detections_db").get( + id=detection_id, status="completed" + ) + + # 2. 
알림 메시지 생성 + title = f"⚠️ 과속 위반 감지: {detection.ocr_result or '미확인'}" + body = ( + f"📍 위치: {detection.location or '알 수 없음'}\n" + f"🚗 속도: {detection.detected_speed}km/h " + f"(제한: {detection.speed_limit}km/h)" + ) + data = { + "detection_id": str(detection_id), + "plate_number": detection.ocr_result or "", + "speed": str(detection.detected_speed), + "speed_limit": str(detection.speed_limit), + "location": detection.location or "", + "detected_at": detection.detected_at.isoformat(), + } + + # 3. 대시보드 토픽으로 항상 전송 (중복 방지) + topic_response = None + already_sent_topic = ( + Notification.objects.using("notifications_db") + .filter( + detection_id=detection_id, + fcm_token="topic:dashboard_alerts", + status="sent", + ) + .exists() + ) + + if not already_sent_topic: + try: + if FCM_MOCK: + topic_response = f"mock-topic-{detection_id}" + else: + from core.firebase.fcm import send_topic_notification + + topic_response = send_topic_notification( + "dashboard_alerts", title, body, data + ) + logger.info( + f"Dashboard topic notification sent for detection " + f"{detection_id}: {topic_response}" + ) + except Exception as e: + logger.warning( + f"Dashboard topic notification failed for detection " + f"{detection_id}: {e}" + ) + + # 4. 토픽 알림 이력 저장 (notifications_db) + Notification.objects.using("notifications_db").create( + detection_id=detection_id, + fcm_token="topic:dashboard_alerts", + title=title, + body=body, + status="sent" if topic_response else "failed", + sent_at=timezone.now() if topic_response else None, + error_message=None if topic_response else "Topic send failed", + ) + else: + topic_response = "already_sent" + logger.info( + f"Dashboard topic notification already sent for detection " + f"{detection_id}, skipping" + ) + + # 5. 
매칭된 차량에 개별 푸시 (기존 동작) + vehicle = None + if detection.vehicle_id: + try: + vehicle = Vehicle.objects.using("vehicles_db").get( + id=detection.vehicle_id + ) + except Vehicle.DoesNotExist: + logger.warning(f"Vehicle {detection.vehicle_id} not found") + + if vehicle and vehicle.fcm_token: + try: + if FCM_MOCK: + vehicle_response = f"mock-message-id-{detection_id}" + else: + from core.firebase.fcm import send_push_notification + + vehicle_response = send_push_notification( + token=vehicle.fcm_token, + title=title, + body=body, + data=data, + ) + + Notification.objects.using("notifications_db").create( + detection_id=detection_id, + fcm_token=vehicle.fcm_token, + title=title, + body=body, + status="sent", + sent_at=timezone.now(), + ) + logger.info( + f"Vehicle notification sent for detection " + f"{detection_id}: {vehicle_response}" + ) + except Exception as e: + logger.warning( + f"Vehicle notification failed for detection " f"{detection_id}: {e}" + ) + Notification.objects.using("notifications_db").create( + detection_id=detection_id, + fcm_token=vehicle.fcm_token, + title=title, + body=body, + status="failed", + error_message=str(e), + ) + + return { + "status": "sent", + "topic": bool(topic_response), + "vehicle": bool(vehicle and vehicle.fcm_token), + } + + except Detection.DoesNotExist: + logger.warning(f"Detection {detection_id} not found or not completed, retrying") + raise self.retry(countdown=3, max_retries=3) + + except Exception as exc: + try: + from apps.notifications.models import Notification + + Notification.objects.using("notifications_db").create( + detection_id=detection_id, + status="failed", + retry_count=self.request.retries, + error_message=str(exc), + ) + except Exception as db_err: + logger.error( + f"Failed to record notification failure for detection {detection_id}: {db_err}" + ) + + logger.error(f"Notification failed for detection {detection_id}: {exc}") + raise diff --git a/tasks/ocr_tasks.py b/tasks/ocr_tasks.py new file mode 100644 index 
00000000..a5479396 --- /dev/null +++ b/tasks/ocr_tasks.py @@ -0,0 +1,168 @@ +"""OCR Worker Tasks (CPU 집약적)""" + +import logging +import os +import re + +from celery import shared_task +from django.utils import timezone + +logger = logging.getLogger(__name__) + +# Mock 모드 (테스트용) +OCR_MOCK = os.getenv("OCR_MOCK", "false").lower() == "true" + +# EasyOCR Reader 캐싱 (Worker 프로세스 수준 싱글턴) +_ocr_reader = None + + +def get_ocr_reader(): + """EasyOCR Reader를 캐싱하여 모델 재로딩 방지 (prefork Worker당 1회)""" + global _ocr_reader + if _ocr_reader is None: + import easyocr + + _ocr_reader = easyocr.Reader(["ko", "en"], gpu=False) + logger.info("EasyOCR Reader initialized") + return _ocr_reader + + +def mock_ocr_result(): + """테스트용 가짜 OCR 결과 생성""" + import random + + num1 = random.randint(10, 999) + char = random.choice("가나다라마바사아자차카타파하") + num2 = random.randint(1000, 9999) + plate = f"{num1}{char}{num2}" + confidence = random.uniform(0.85, 0.99) + return plate, confidence + + +def is_valid_plate(text: str) -> bool: + """한국 번호판 패턴 검증""" + pattern = r"^\d{2,3}[가-힣]\d{4}$" + return bool(re.match(pattern, text.replace(" ", ""))) + + +def normalize_plate(text: str) -> str: + """번호판 정규화 (공백 제거)""" + return text.replace(" ", "").upper() + + +@shared_task(bind=True, max_retries=3, default_retry_delay=60, acks_late=True) +def process_ocr(self, detection_id: int, gcs_uri: str): + """ + OCR 처리 Task + - GCS에서 이미지 다운로드 + - EasyOCR 실행 + - 직접 MySQL 업데이트 (Choreography 패턴) + - MSA: 각 서비스별 DB 사용 + """ + from apps.detections.models import Detection + from apps.vehicles.models import Vehicle + from tasks.notification_tasks import send_notification + + try: + # 1. 
상태를 processing으로 업데이트 (detections_db) + Detection.objects.using("detections_db").filter(id=detection_id).update( + status="processing", updated_at=timezone.now() + ) + + if OCR_MOCK: + # Mock 모드: 실제 OCR 없이 가짜 결과 반환 + import random + import time + + time.sleep(random.uniform(0.1, 0.5)) + plate_number, confidence = mock_ocr_result() + else: + # 실제 OCR 처리 + from core.gcs.client import download_image + + # 2. GCS에서 이미지 다운로드 + image_bytes = download_image(gcs_uri) + + # 3. EasyOCR 실행 (캐싱된 Reader 사용) + reader = get_ocr_reader() + results = reader.readtext(image_bytes) + + # 4. 번호판 파싱 (신뢰도 가장 높은 결과) + plate_number = None + confidence = 0.0 + + for bbox, text, conf in results: + if is_valid_plate(text) and conf > confidence: + plate_number = normalize_plate(text) + confidence = conf + + # 5. 직접 MySQL 업데이트 (detections_db) + detection = Detection.objects.using("detections_db").get(id=detection_id) + detection.ocr_result = plate_number + detection.ocr_confidence = confidence + detection.status = "completed" + detection.processed_at = timezone.now() + detection.save( + using="detections_db", + update_fields=[ + "ocr_result", + "ocr_confidence", + "status", + "processed_at", + "updated_at", + ], + ) + + # 6. Vehicle 매칭 (MSA: vehicles_db에서 조회) + if plate_number: + try: + vehicle = ( + Vehicle.objects.using("vehicles_db") + .filter(plate_number=plate_number) + .first() + ) + if vehicle: + detection.vehicle_id = vehicle.id + detection.save( + using="detections_db", + update_fields=["vehicle_id", "updated_at"], + ) + except Exception as e: + logger.warning(f"Vehicle lookup failed: {e}") + + # 7. 
Always send notification for completed detections + # (dashboard gets topic notification; matched vehicle gets individual push) + try: + send_notification.apply_async(args=[detection_id], queue="fcm_queue") + except Exception as e: + logger.warning( + f"Failed to enqueue notification for detection {detection_id}: {e}" + ) + + logger.info(f"OCR completed for detection {detection_id}: {plate_number}") + return { + "detection_id": detection_id, + "plate": plate_number, + "confidence": confidence, + } + + except Exception as exc: + is_final_retry = self.request.retries >= self.max_retries + if is_final_retry: + # 최종 실패: status=failed 기록 + Detection.objects.using("detections_db").filter(id=detection_id).update( + status="failed", error_message=str(exc), updated_at=timezone.now() + ) + logger.error(f"OCR permanently failed for detection {detection_id}: {exc}") + raise + else: + # 재시도 가능: processing 유지, 에러만 기록 + Detection.objects.using("detections_db").filter(id=detection_id).update( + error_message=f"Retry {self.request.retries}: {exc}", + updated_at=timezone.now(), + ) + logger.warning( + f"OCR retry {self.request.retries}/{self.max_retries} " + f"for detection {detection_id}: {exc}" + ) + raise self.retry(exc=exc) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..304b9251 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Tests Package diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..1478bd0c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,116 @@ +""" +Pytest Configuration and Fixtures +""" + +import os +from unittest.mock import MagicMock, patch + +import pytest + +# Django 설정 +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.dev") + +import django + +django.setup() + +from django.utils import timezone + +from apps.detections.models import Detection +from apps.vehicles.models import Vehicle + + +@pytest.fixture +def sample_vehicle(db): + """테스트용 Vehicle 생성""" + return 
Vehicle.objects.create( + plate_number="12가3456", + owner_name="테스트 사용자", + owner_phone="010-1234-5678", + fcm_token="test-fcm-token-12345", + ) + + +@pytest.fixture +def sample_vehicle_no_fcm(db): + """FCM 토큰 없는 Vehicle 생성""" + return Vehicle.objects.create( + plate_number="34나5678", + owner_name="테스트 사용자2", + owner_phone="010-9876-5432", + ) + + +@pytest.fixture +def sample_detection(db, sample_vehicle): + """테스트용 Detection 생성""" + return Detection.objects.create( + vehicle_id=sample_vehicle.id, + camera_id="CAM-001", + location="테스트 위치", + detected_speed=85.5, + speed_limit=60.0, + detected_at=timezone.now(), + image_gcs_uri="gs://test-bucket/test-image.jpg", + status="pending", + ) + + +@pytest.fixture +def pending_detection(db): + """Pending 상태의 Detection""" + return Detection.objects.create( + camera_id="CAM-TEST-001", + location="테스트 위치", + detected_speed=95.0, + speed_limit=60.0, + detected_at=timezone.now(), + image_gcs_uri="gs://test-bucket/pending-test.jpg", + status="pending", + ) + + +@pytest.fixture +def completed_detection(db, sample_vehicle): + """Completed 상태의 Detection""" + return Detection.objects.create( + vehicle_id=sample_vehicle.id, + camera_id="CAM-TEST-002", + location="완료 테스트 위치", + detected_speed=100.0, + speed_limit=60.0, + detected_at=timezone.now(), + processed_at=timezone.now(), + image_gcs_uri="gs://test-bucket/completed-test.jpg", + ocr_result="12가3456", + ocr_confidence=0.95, + status="completed", + ) + + +@pytest.fixture +def mock_celery_task(): + """Celery Task Mock""" + with patch("celery.app.task.Task.apply_async") as mock: + mock.return_value = MagicMock(id="mock-task-id") + yield mock + + +@pytest.fixture +def mock_fcm(): + """Firebase FCM Mock""" + with patch("firebase_admin.messaging.send") as mock: + mock.return_value = "mock-message-id" + yield mock + + +@pytest.fixture +def mock_gcs(): + """Google Cloud Storage Mock""" + with patch("google.cloud.storage.Client") as mock: + mock_blob = MagicMock() + 
mock_blob.download_as_bytes.return_value = b"fake-image-data" + mock_bucket = MagicMock() + mock_bucket.blob.return_value = mock_blob + mock.return_value.bucket.return_value = mock_bucket + yield mock diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 00000000..211067f0 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1 @@ +# Integration Tests Package diff --git a/tests/integration/test_api_endpoints.py b/tests/integration/test_api_endpoints.py new file mode 100644 index 00000000..50af0d3e --- /dev/null +++ b/tests/integration/test_api_endpoints.py @@ -0,0 +1,187 @@ +""" +Integration Tests for REST API Endpoints +""" + +import pytest +from django.test import Client +from rest_framework import status + +from apps.notifications.models import Notification +from apps.vehicles.models import Vehicle + + +@pytest.fixture +def api_client(): + """테스트용 API Client""" + return Client() + + +@pytest.mark.django_db(databases="__all__") +class TestVehicleAPI: + """Vehicle API 통합 테스트""" + + def test_list_vehicles(self, api_client, sample_vehicle): + """차량 목록 조회 테스트""" + response = api_client.get("/api/v1/vehicles/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["count"] >= 1 + + def test_create_vehicle(self, api_client, db): + """차량 등록 테스트""" + data = { + "plate_number": "78라9012", + "owner_name": "API 테스트 사용자", + "owner_phone": "010-7890-1234", + } + response = api_client.post( + "/api/v1/vehicles/", data=data, content_type="application/json" + ) + + assert response.status_code == status.HTTP_201_CREATED + assert Vehicle.objects.filter(plate_number="78라9012").exists() + + def test_create_duplicate_vehicle(self, api_client, sample_vehicle): + """중복 차량 등록 시 에러 테스트""" + data = { + "plate_number": sample_vehicle.plate_number, # 중복 + "owner_name": "중복 테스트", + "owner_phone": "010-0000-0000", + } + response = api_client.post( + "/api/v1/vehicles/", data=data, 
content_type="application/json" + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + def test_retrieve_vehicle(self, api_client, sample_vehicle): + """차량 상세 조회 테스트""" + response = api_client.get(f"/api/v1/vehicles/{sample_vehicle.id}/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["plate_number"] == sample_vehicle.plate_number + + def test_update_vehicle_fcm_token(self, api_client, sample_vehicle): + """FCM 토큰 업데이트 테스트""" + data = {"fcm_token": "new-fcm-token-updated"} + response = api_client.patch( + f"/api/v1/vehicles/{sample_vehicle.id}/", + data=data, + content_type="application/json", + ) + + assert response.status_code == status.HTTP_200_OK + sample_vehicle.refresh_from_db() + assert sample_vehicle.fcm_token == "new-fcm-token-updated" + + +@pytest.mark.django_db(databases="__all__") +class TestDetectionAPI: + """Detection API 통합 테스트""" + + def test_list_detections(self, api_client, sample_detection): + """Detection 목록 조회 테스트""" + response = api_client.get("/api/v1/detections/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["count"] >= 1 + + def test_retrieve_detection(self, api_client, sample_detection): + """Detection 상세 조회 테스트""" + response = api_client.get(f"/api/v1/detections/{sample_detection.id}/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["camera_id"] == sample_detection.camera_id + + def test_filter_detections_by_status( + self, api_client, pending_detection, completed_detection + ): + """상태별 Detection 필터링 테스트""" + # pending 상태만 조회 + response = api_client.get("/api/v1/detections/?status=pending") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + for result in data["results"]: + assert result["status"] == "pending" + + def test_filter_detections_by_camera(self, api_client, sample_detection): + """카메라별 Detection 필터링 테스트""" + response = api_client.get( + 
f"/api/v1/detections/?camera_id={sample_detection.camera_id}" + ) + + assert response.status_code == status.HTTP_200_OK + data = response.json() + for result in data["results"]: + assert result["camera_id"] == sample_detection.camera_id + + +@pytest.mark.django_db(databases="__all__") +class TestNotificationAPI: + """Notification API 통합 테스트""" + + @pytest.fixture + def sample_notification(self, db, completed_detection): + """테스트용 Notification""" + return Notification.objects.create( + detection_id=completed_detection.id, + fcm_token="test-token", + title="테스트 알림", + body="테스트 내용", + status="sent", + ) + + def test_list_notifications(self, api_client, sample_notification): + """Notification 목록 조회 테스트""" + response = api_client.get("/api/v1/notifications/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["count"] >= 1 + + def test_retrieve_notification(self, api_client, sample_notification): + """Notification 상세 조회 테스트""" + response = api_client.get(f"/api/v1/notifications/{sample_notification.id}/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["status"] == "sent" + + def test_filter_notifications_by_status(self, api_client, sample_notification): + """상태별 Notification 필터링 테스트""" + response = api_client.get("/api/v1/notifications/?status=sent") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + for result in data["results"]: + assert result["status"] == "sent" + + +@pytest.mark.django_db(databases="__all__") +class TestHealthEndpoints: + """헬스체크 및 기본 엔드포인트 테스트""" + + def test_health_check(self, api_client): + """헬스체크 엔드포인트 테스트""" + response = api_client.get("/health/") + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["status"] == "healthy" + + def test_home_endpoint(self, api_client): + """홈 엔드포인트 테스트""" + response = api_client.get("/") + + assert response.status_code == status.HTTP_200_OK + + def 
@pytest.mark.django_db(transaction=True, databases="__all__")
class TestIngestionFlow:
    """
    Ingestion-path tests: MQTT/API input becomes a Detection row
    that is ready for downstream task publication.
    """

    def test_detection_creation_triggers_pending_status(self):
        """A freshly ingested Detection starts life as `pending`."""
        row = Detection.objects.create(
            camera_id="CAM-INGEST-001",
            location="수신 테스트",
            detected_speed=75.0,
            speed_limit=60.0,
            detected_at=timezone.now(),
            image_gcs_uri="gs://test-bucket/ingest.jpg",
            status="pending",
        )

        # Nothing downstream has run yet, so the processing fields are unset.
        assert row.status == "pending"
        assert row.processed_at is None
        assert row.ocr_result is None

    def test_detection_with_speed_violation(self):
        """A measured speed above the limit is stored verbatim."""
        row = Detection.objects.create(
            camera_id="CAM-SPEED-001",
            location="과속 테스트",
            detected_speed=95.0,  # over the 60 km/h limit
            speed_limit=60.0,
            detected_at=timezone.now(),
            image_gcs_uri="gs://test-bucket/speed.jpg",
            status="pending",
        )

        over_by = row.detected_speed - row.speed_limit
        assert row.detected_speed > row.speed_limit
        assert over_by == 35.0
@pytest.mark.django_db(transaction=True, databases="__all__")
class TestErrorHandling:
    """Error handling and retry-path behaviour around Detections."""

    def test_detection_not_found(self):
        """Looking up a non-existent Detection yields None, not an error."""
        missing = Detection.objects.filter(id=999999).first()
        assert missing is None

    def test_notification_without_vehicle(self):
        """A Detection with no matched vehicle cannot resolve an FCM token."""
        row = Detection.objects.create(
            camera_id="CAM-NOVEH-001",
            location="차량 없음 테스트",
            detected_speed=85.0,
            speed_limit=60.0,
            detected_at=timezone.now(),
            image_gcs_uri="gs://test-bucket/noveh.jpg",
            ocr_result="00가0000",
            status="completed",
        )

        # MSA design: vehicle is referenced by id (BigIntegerField), so an
        # unmatched plate leaves the reference empty.
        assert row.vehicle_id is None

        token = None
        if row.vehicle_id:
            from apps.vehicles.models import Vehicle

            owner = Vehicle.objects.filter(id=row.vehicle_id).first()
            token = owner.fcm_token if owner else None
        assert token is None

    def test_detection_status_failed(self):
        """A processing error flips the row to `failed` with a message."""
        row = Detection.objects.create(
            camera_id="CAM-FAIL-001",
            location="실패 테스트",
            detected_speed=80.0,
            speed_limit=60.0,
            detected_at=timezone.now(),
            image_gcs_uri="gs://test-bucket/fail.jpg",
            status="pending",
        )

        row.status = "failed"
        row.error_message = "OCR processing failed: Invalid image format"
        row.save()
        row.refresh_from_db()

        assert row.status == "failed"
        assert "Invalid image format" in row.error_message
Detection 생성 (Ingestion) + detection = Detection.objects.create( + camera_id="CAM-E2E-001", + location="E2E 테스트 위치", + detected_speed=90.0, + speed_limit=60.0, + detected_at=timezone.now(), + image_gcs_uri="gs://test-bucket/e2e-test.jpg", + status="pending", + ) + + # 2. OCR 처리 (시뮬레이션) + detection.status = "processing" + detection.save() + + detection.ocr_result = sample_vehicle.plate_number + detection.ocr_confidence = 0.95 + detection.vehicle_id = sample_vehicle.id + detection.processed_at = timezone.now() + detection.status = "completed" + detection.save() + + # 3. Notification 생성 + notification = Notification.objects.create( + detection_id=detection.id, + fcm_token=sample_vehicle.fcm_token, + title=f"⚠️ 과속 위반 감지: {detection.ocr_result}", + body=f"📍 위치: {detection.location}\n🚗 속도: {detection.detected_speed}km/h", + status="sent", + sent_at=timezone.now(), + ) + + # 검증 + assert detection.status == "completed" + assert detection.vehicle_id == sample_vehicle.id + assert notification.status == "sent" + + # 관계 확인 (MSA: ID 기반 조회) + assert notification.detection_id == detection.id + assert Detection.objects.filter( + vehicle_id=sample_vehicle.id, id=detection.id + ).exists() + assert Notification.objects.filter( + detection_id=detection.id, id=notification.id + ).exists() + + def test_statistics_calculation(self, sample_vehicle): + """통계 계산 테스트""" + # 여러 Detection 생성 + speeds = [75.0, 85.0, 95.0, 105.0] + for i, speed in enumerate(speeds): + Detection.objects.create( + vehicle_id=sample_vehicle.id, + camera_id=f"CAM-STAT-{i}", + location="통계 테스트", + detected_speed=speed, + speed_limit=60.0, + detected_at=timezone.now(), + image_gcs_uri=f"gs://test-bucket/stat-{i}.jpg", + status="completed" if i % 2 == 0 else "pending", + ) + + # 통계 확인 + total = Detection.objects.count() + completed = Detection.objects.filter(status="completed").count() + pending = Detection.objects.filter(status="pending").count() + + assert total >= 4 + assert completed >= 2 + assert pending >= 2 + + 
@pytest.mark.django_db(transaction=True, databases="__all__")
@pytest.mark.skipif(
    not (google_available and firebase_available),
    reason="Requires google.cloud and firebase_admin",
)
class TestFullTaskExecution:
    """Executes the real task code (only when the optional deps are installed)."""

    @patch("tasks.ocr_tasks.os.environ.get")
    @patch("tasks.notification_tasks.os.environ.get")
    def test_ocr_and_notification_tasks(
        self, mock_notif_env, mock_ocr_env, sample_vehicle
    ):
        """The OCR task drives a pending Detection to completion in mock mode."""
        # Force both tasks into mock mode via their environment lookups.
        mock_ocr_env.return_value = "true"
        mock_notif_env.return_value = "true"

        from tasks.ocr_tasks import process_ocr

        row = Detection.objects.create(
            camera_id="CAM-TASK-001",
            location="Task 테스트",
            detected_speed=95.0,
            speed_limit=60.0,
            detected_at=timezone.now(),
            image_gcs_uri="gs://test-bucket/task.jpg",
            status="pending",
        )

        process_ocr(row.id, gcs_uri=row.image_gcs_uri)
        row.refresh_from_db()

        assert row.status == "completed"
@pytest.mark.django_db(databases="__all__")
class TestDetectionModel:
    """Detection model behaviour."""

    def test_create_detection(self, sample_detection):
        """A Detection fixture persists with its initial fields."""
        assert sample_detection.pk is not None
        assert sample_detection.camera_id == "CAM-001"
        assert sample_detection.status == "pending"

    def test_detection_str(self, sample_detection):
        """__str__ is "<ocr_result or 'Unknown'> - <speed>km/h"; OCR has not run."""
        assert str(sample_detection) == (
            f"Unknown - {sample_detection.detected_speed}km/h"
        )

    def test_detection_status_choices(self, pending_detection):
        """Status advances pending → processing → completed and persists."""
        assert pending_detection.status == "pending"

        for next_state in ("processing", "completed"):
            pending_detection.status = next_state
            pending_detection.save()
            pending_detection.refresh_from_db()
            assert pending_detection.status == next_state

    def test_detection_vehicle_relation(self, sample_detection, sample_vehicle):
        """MSA: the vehicle link is a plain BigIntegerField id, not an FK."""
        assert sample_detection.vehicle_id == sample_vehicle.id
        assert Detection.objects.filter(
            vehicle_id=sample_vehicle.id, id=sample_detection.id
        ).exists()

    def test_detection_speed_violation(self, sample_detection):
        """The fixture records a speed above its limit."""
        assert sample_detection.detected_speed > sample_detection.speed_limit
@pytest.mark.django_db(databases="__all__")
class TestNotificationModel:
    """Notification model behaviour."""

    @staticmethod
    def _make(detection_id, **overrides):
        """Create a Notification with the shared test defaults applied."""
        fields = {
            "detection_id": detection_id,
            "fcm_token": "test-token",
            "title": "테스트 알림",
            "body": "테스트 내용",
            "status": "pending",
        }
        fields.update(overrides)
        return Notification.objects.create(**fields)

    def test_create_notification(self, completed_detection, db):
        """A Notification persists with its initial pending status."""
        note = self._make(completed_detection.id)
        assert note.pk is not None
        assert note.status == "pending"

    def test_notification_str(self, completed_detection, db):
        """__str__ is "Notification for Detection #<id> - <status>"."""
        note = self._make(completed_detection.id, status="sent")
        expected = f"Notification for Detection #{completed_detection.id} - sent"
        assert str(note) == expected

    def test_notification_retry_count(self, completed_detection, db):
        """retry_count increments survive a save/reload cycle."""
        note = self._make(completed_detection.id, status="failed", retry_count=0)

        note.retry_count += 1
        note.save()
        note.refresh_from_db()
        assert note.retry_count == 1

    def test_notification_detection_relation(self, completed_detection, db):
        """MSA: the detection link is a BigIntegerField id reference."""
        note = self._make(completed_detection.id, status="sent")
        assert note.detection_id == completed_detection.id
        assert Notification.objects.filter(
            detection_id=completed_detection.id, id=note.id
        ).exists()
@pytest.mark.django_db(databases="__all__")
class TestVehicleSerializer:
    """Vehicle serializer round-trips."""

    def test_serialize_vehicle(self, sample_vehicle):
        """Serialization exposes plate, owner, and timestamps."""
        payload = VehicleSerializer(sample_vehicle).data

        assert payload["plate_number"] == "12가3456"
        assert payload["owner_name"] == "테스트 사용자"
        assert "created_at" in payload

    def test_deserialize_vehicle(self, db):
        """Valid input deserializes and saves a new Vehicle."""
        serializer = VehicleSerializer(
            data={
                "plate_number": "56다7890",
                "owner_name": "새 사용자",
                "owner_phone": "010-1111-2222",
            }
        )

        assert serializer.is_valid()
        assert serializer.save().plate_number == "56다7890"

    def test_invalid_plate_number(self, db):
        """A blank plate number is rejected with a field error."""
        serializer = VehicleSerializer(
            data={
                "plate_number": "",  # blank plate must be rejected
                "owner_name": "테스트",
                "owner_phone": "010-1111-2222",
            }
        )

        assert not serializer.is_valid()
        assert "plate_number" in serializer.errors
@pytest.mark.django_db(databases="__all__")
class TestNotificationSerializer:
    """Notification serializer output."""

    @staticmethod
    def _sent_notification(detection_id, title):
        """Create a sent Notification with the shared test defaults."""
        from apps.notifications.models import Notification

        return Notification.objects.create(
            detection_id=detection_id,
            fcm_token="test-token",
            title=title,
            body="테스트 내용",
            status="sent",
            sent_at=timezone.now(),
        )

    def test_serialize_notification(self, completed_detection, db):
        """The detail serializer exposes title, status, and detection_id."""
        note = self._sent_notification(completed_detection.id, "테스트 알림")
        payload = NotificationSerializer(note).data

        assert payload["title"] == "테스트 알림"
        assert payload["status"] == "sent"
        assert payload["detection_id"] == completed_detection.id

    def test_serialize_notification_list(self, completed_detection, db):
        """The list serializer keeps the detection_id reference (MSA)."""
        note = self._sent_notification(completed_detection.id, "리스트 알림")
        payload = NotificationListSerializer(note).data

        assert payload["detection_id"] == completed_detection.id
@pytest.mark.django_db(databases="__all__")
class TestOCRTaskMock:
    """OCR task tests that run without google.cloud installed."""

    def test_ocr_mock_result_format(self, pending_detection):
        """Mock OCR plates always follow NN<region><char>NNNN (9 chars).

        The generator composes a 2-digit number, a 2-character region
        name, one separator character, and a 4-digit serial, so the
        result is always exactly 9 characters long.  The previous
        `len(plate) >= 7` check was looser than the format guarantees;
        assert the exact length and the digit structure instead.
        """
        import random

        def generate_mock_plate():
            regions = ["서울", "경기", "인천", "부산", "대구"]
            chars = "abcdefghijklmnopqrstuvwxyz가나다라마바사아자차카타파하"
            return (
                f"{random.randint(10, 99)}"
                f"{random.choice(regions)}"
                f"{random.choice(chars)}"
                f"{random.randint(1000, 9999)}"
            )

        plate = generate_mock_plate()
        assert len(plate) == 9  # fixed width by construction
        assert plate[:2].isdigit()  # leading 2-digit number
        assert plate[-4:].isdigit()  # trailing 4-digit serial

    @pytest.mark.skipif(not google_available, reason="google.cloud not installed")
    @patch("tasks.ocr_tasks.os.environ.get")
    def test_process_ocr_mock_mode(self, mock_env, pending_detection):
        """In mock mode the OCR task drives a pending Detection to completed."""
        mock_env.return_value = "true"

        from tasks.ocr_tasks import process_ocr

        process_ocr(pending_detection.id, gcs_uri=pending_detection.image_gcs_uri)

        pending_detection.refresh_from_db()
        assert pending_detection.status == "completed"
@pytest.mark.django_db(databases="__all__")
class TestTaskErrorHandling:
    """Task-level error handling without the optional cloud dependencies."""

    def test_detection_status_transitions(self, pending_detection):
        """pending → processing → completed, persisted at each step."""
        assert pending_detection.status == "pending"

        for next_state in ("processing", "completed"):
            pending_detection.status = next_state
            pending_detection.save()
            pending_detection.refresh_from_db()
            assert pending_detection.status == next_state

    def test_detection_failed_status(self, pending_detection):
        """Failure state and message survive a save/reload cycle."""
        pending_detection.status = "failed"
        pending_detection.error_message = "테스트 에러"
        pending_detection.save()
        pending_detection.refresh_from_db()

        assert pending_detection.status == "failed"
        assert pending_detection.error_message == "테스트 에러"

    def test_notification_creation_for_detection(self, completed_detection):
        """A Notification can be created for a Detection and marked sent."""
        from apps.notifications.models import Notification

        note = Notification.objects.create(
            detection_id=completed_detection.id,
            fcm_token="test-token",
            title="테스트 알림",
            body="테스트 본문",
            status="pending",
        )

        assert note.detection_id == completed_detection.id
        assert note.status == "pending"

        note.status = "sent"
        note.save()
        note.refresh_from_db()
        assert note.status == "sent"