diff --git a/.env.example b/.env.example index 2400801..70a54cf 100644 --- a/.env.example +++ b/.env.example @@ -1,20 +1,69 @@ +# ============================================================================= # WINDMAR Environment Configuration -# Copy this file to .env and adjust values for your environment +# ============================================================================= +# +# Copy this file to .env and configure for your environment: +# cp .env.example .env +# +# IMPORTANT: Never commit .env to version control! +# +# ============================================================================= # ============================================================================= -# API Configuration +# REQUIRED SETTINGS - Must be configured before deployment # ============================================================================= -API_HOST=0.0.0.0 -API_PORT=8000 -API_RELOAD=false -API_LOG_LEVEL=info -# CORS origins (comma-separated) -CORS_ORIGINS=http://localhost:3000,http://localhost:3001 +# Database Configuration +DB_USER=windmar +DB_PASSWORD=CHANGE_THIS_TO_A_SECURE_PASSWORD +DB_NAME=windmar +DB_HOST=db +DB_PORT=5432 +DATABASE_URL=postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME} + +# Redis Configuration +REDIS_PASSWORD=CHANGE_THIS_TO_A_SECURE_PASSWORD +REDIS_URL=redis://:${REDIS_PASSWORD}@redis:6379/0 + +# API Security - CRITICAL: Generate a unique secret key! +# Generate with: openssl rand -hex 32 +API_SECRET_KEY=CHANGE_THIS_GENERATE_WITH_OPENSSL_RAND_HEX_32 + +# CORS Origins - Comma-separated list of allowed origins +# Remove localhost entries for production! +CORS_ORIGINS=https://yourdomain.com,https://www.yourdomain.com + +# ============================================================================= +# APPLICATION SETTINGS +# ============================================================================= + +# Environment: development, staging, production +ENVIRONMENT=production + +# Logging level: debug, info, warning, error +LOG_LEVEL=info + +# Authentication (MUST be true in production) +AUTH_ENABLED=true + +# Rate limiting (recommended for production) +RATE_LIMIT_ENABLED=true +RATE_LIMIT_PER_MINUTE=60 +RATE_LIMIT_PER_HOUR=1000 + +# ============================================================================= +# SERVICE PORTS (for docker-compose) +# ============================================================================= + +API_PORT=8000 +FRONTEND_PORT=3000 +# DB_PORT=5432 # Uncomment only if you need external DB access +# REDIS_PORT=6379 # Uncomment only if you need external Redis access # ============================================================================= -# Live Data Configuration +# WEATHER DATA CONFIGURATION # ============================================================================= + # Use mock data (true) or real Copernicus data (false) COPERNICUS_MOCK_MODE=true @@ -24,33 +73,52 @@ COPERNICUS_USERNAME= COPERNICUS_PASSWORD= # ============================================================================= -# Calibration Configuration +# MONITORING & OBSERVABILITY # ============================================================================= + +# Sentry DSN for error tracking (optional but recommended) +SENTRY_DSN= + +# Enable Prometheus metrics endpoint +METRICS_ENABLED=true + +# ============================================================================= +# CALIBRATION & SIMULATION +# ============================================================================= + # Learning rate for coefficient updates (0.001 - 0.1) CALIBRATION_LEARNING_RATE=0.01 -# Path to persist calibration state (optional) +# Path to persist calibration state CALIBRATION_PERSISTENCE_PATH=data/calibration.json -# ============================================================================= -# Simulation Defaults -# ============================================================================= -# Default wave parameters for simulation mode +# Simulation defaults SIM_WAVE_HEIGHT_M=2.5 SIM_WAVE_PERIOD_S=8.0 -SIM_START_LAT=43.5 -SIM_START_LON=7.0 -SIM_SPEED_KTS=12.0 -SIM_HEADING_DEG=270.0 # ============================================================================= -# Data Storage +# DATA STORAGE # ============================================================================= + GRIB_CACHE_DIR=data/grib_cache DATA_DIR=data # ============================================================================= -# Logging +# PRODUCTION CHECKLIST +# ============================================================================= +# +# Before deploying to production, ensure: +# +# [ ] DB_PASSWORD is a strong, unique password +# [ ] REDIS_PASSWORD is a strong, unique password +# [ ] API_SECRET_KEY is generated with: openssl rand -hex 32 +# [ ] CORS_ORIGINS contains only your production domains (no localhost) +# [ ] AUTH_ENABLED=true +# [ ] RATE_LIMIT_ENABLED=true +# [ ] ENVIRONMENT=production +# [ ] SENTRY_DSN is configured for error tracking +# [ ] SSL/TLS is configured via reverse proxy +# [ ] Database backups are scheduled +# [ ] Log aggregation is configured +# # ============================================================================= -LOG_LEVEL=INFO -LOG_FORMAT=%(asctime)s - %(name)s - %(levelname)s - %(message)s diff --git a/Dockerfile b/Dockerfile index 1ccba56..f65f78e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,26 @@ -# Multi-stage build for WINDMAR backend -FROM python:3.11-slim as base +# ============================================================================= +# WINDMAR API - Production Dockerfile +# ============================================================================= +# Multi-stage build optimized for security and performance +# +# Build: docker build -t windmar-api:latest . +# Run: docker run -p 8000:8000 windmar-api:latest +# ============================================================================= -# Set environment variables +# ----------------------------------------------------------------------------- +# Stage 1: Build dependencies +# ----------------------------------------------------------------------------- +FROM python:3.11-slim AS builder + +# Set build-time environment variables ENV PYTHONUNBUFFERED=1 \ PYTHONDONTWRITEBYTECODE=1 \ PIP_NO_CACHE_DIR=1 \ PIP_DISABLE_PIP_VERSION_CHECK=1 -WORKDIR /app +WORKDIR /build -# Install system dependencies for scientific libraries +# Install build dependencies RUN apt-get update && apt-get install -y --no-install-recommends \ gcc \ g++ \ @@ -19,26 +30,79 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libproj-dev \ && rm -rf /var/lib/apt/lists/* -# Copy requirements first for better caching +# Copy requirements and install dependencies COPY requirements.txt . +RUN pip install --no-cache-dir --target=/build/deps -r requirements.txt + +# ----------------------------------------------------------------------------- +# Stage 2: Production runtime +# ----------------------------------------------------------------------------- +FROM python:3.11-slim AS runtime + +# Labels for container metadata +LABEL org.opencontainers.image.title="WINDMAR API" \ + org.opencontainers.image.description="Maritime Route Optimization API" \ + org.opencontainers.image.vendor="SL Mar" \ + org.opencontainers.image.version="2.1.0" \ + org.opencontainers.image.licenses="Commercial" -# Install Python dependencies -RUN pip install --no-cache-dir -r requirements.txt +# Security: Run as non-root user +RUN groupadd --gid 1000 windmar \ + && useradd --uid 1000 --gid windmar --shell /bin/bash --create-home windmar + +# Set runtime environment variables +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=/app \ + PATH="/app/deps/bin:$PATH" \ + # Application defaults (override via environment) + API_HOST=0.0.0.0 \ + API_PORT=8000 \ + LOG_LEVEL=info \ + ENVIRONMENT=production + +WORKDIR /app + +# Install runtime system dependencies only +RUN apt-get update && apt-get install -y --no-install-recommends \ + libeccodes0 \ + libgeos-c1v5 \ + libproj25 \ + curl \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + +# Copy Python dependencies from builder +COPY --from=builder /build/deps /app/deps # Copy application code -COPY src/ ./src/ -COPY api/ ./api/ -COPY data/ ./data/ +COPY --chown=windmar:windmar src/ ./src/ +COPY --chown=windmar:windmar api/ ./api/ +COPY --chown=windmar:windmar LICENSE ./ + +# Create necessary directories with correct permissions +RUN mkdir -p data/grib data/vessel_database data/calibration data/weather_cache logs \ + && chown -R windmar:windmar /app -# Create data directories -RUN mkdir -p data/grib data/vessel_database data/calibration +# Switch to non-root user +USER windmar # Expose API port EXPOSE 8000 -# Health check -HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ - CMD python -c "import requests; requests.get('http://localhost:8000/api/health')" || exit 1 +# Health check with curl (more reliable than Python in minimal image) +HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ + CMD curl -f http://localhost:8000/api/health || exit 1 -# Run the API server -CMD ["uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "4"] +# Run the API server with production settings +# - Workers based on CPU cores (2 * cores + 1 is recommended) +# - Access log disabled for performance (structured logging handles this) +# - Proxy headers enabled for load balancer compatibility +CMD ["python", "-m", "uvicorn", "api.main:app", \ + "--host", "0.0.0.0", \ + "--port", "8000", \ + "--workers", "4", \ + "--proxy-headers", \ + "--forwarded-allow-ips", "*", \ + "--access-log", \ + "--log-level", "info"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e298449 --- /dev/null +++ b/LICENSE @@ -0,0 +1,132 @@ +WINDMAR Commercial Software License Agreement +Version 1.0 + +Copyright (c) 2024-2026 SL Mar. All rights reserved. + +IMPORTANT: READ CAREFULLY BEFORE USING THIS SOFTWARE + +This Commercial Software License Agreement ("Agreement") is a legal agreement +between you (either an individual or an entity, "Licensee") and SL Mar +("Licensor") for the WINDMAR Maritime Route Optimization Software ("Software"). + +By installing, copying, or otherwise using the Software, you agree to be bound +by the terms of this Agreement. If you do not agree to these terms, do not +install or use the Software. + +1. GRANT OF LICENSE + + Subject to the terms and conditions of this Agreement and payment of the + applicable license fees, Licensor grants Licensee a non-exclusive, + non-transferable, limited license to: + + a) Install and use the Software on servers owned or controlled by Licensee + for Licensee's internal business purposes. + + b) Make a reasonable number of backup copies of the Software for archival + purposes. + + c) Deploy the Software in production environments as specified in the + purchased license tier. + +2. LICENSE TIERS + + The Software is licensed under the following tiers: + + a) SINGLE-SERVER LICENSE: Permits installation on one (1) server instance. + + b) ENTERPRISE LICENSE: Permits installation on unlimited server instances + within Licensee's organization. + + c) OEM LICENSE: Permits redistribution as part of Licensee's own product, + subject to separate OEM agreement terms. + +3. RESTRICTIONS + + Licensee shall NOT: + + a) Sublicense, sell, rent, lease, or otherwise transfer the Software to + any third party without prior written consent from Licensor. + + b) Modify, translate, adapt, reverse engineer, decompile, disassemble, or + create derivative works based on the Software. + + c) Remove or alter any proprietary notices, labels, or marks on the Software. + + d) Use the Software to provide services to third parties (SaaS) without an + appropriate license tier. + + e) Use the Software in violation of any applicable laws or regulations. + +4. INTELLECTUAL PROPERTY + + The Software is protected by copyright laws and international treaty + provisions. Licensor retains all intellectual property rights in the + Software. This Agreement does not grant Licensee any rights to trademarks + or service marks of Licensor. + +5. SUPPORT AND UPDATES + + a) Licensed users are entitled to receive software updates and bug fixes + for the duration of their active support subscription. + + b) Technical support is provided according to the support tier purchased. + + c) Support subscriptions are sold separately and are not included in the + base license fee unless explicitly stated. + +6. WARRANTY DISCLAIMER + + THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE, AND NONINFRINGEMENT. LICENSOR DOES NOT + WARRANT THAT THE SOFTWARE WILL BE ERROR-FREE OR UNINTERRUPTED. + +7. LIMITATION OF LIABILITY + + IN NO EVENT SHALL LICENSOR BE LIABLE FOR ANY INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + LICENSOR'S TOTAL LIABILITY SHALL NOT EXCEED THE AMOUNT PAID BY LICENSEE + FOR THE SOFTWARE LICENSE. + +8. TERMINATION + + a) This Agreement is effective until terminated. + + b) Licensor may terminate this Agreement immediately upon written notice + if Licensee breaches any term of this Agreement. + + c) Upon termination, Licensee must destroy all copies of the Software in + their possession. + + d) Sections 4, 6, 7, and 9 shall survive termination. + +9. GOVERNING LAW + + This Agreement shall be governed by and construed in accordance with the + laws of the jurisdiction in which Licensor is incorporated, without regard + to its conflict of law provisions. + +10. ENTIRE AGREEMENT + + This Agreement constitutes the entire agreement between the parties with + respect to the Software and supersedes all prior or contemporaneous + understandings regarding such subject matter. + +11. CONTACT + + For licensing inquiries, please contact: + + SL Mar + Email: licensing@windmar.io + Website: https://windmar.io + +--- + +BY USING THIS SOFTWARE, YOU ACKNOWLEDGE THAT YOU HAVE READ THIS AGREEMENT, +UNDERSTAND IT, AND AGREE TO BE BOUND BY ITS TERMS AND CONDITIONS. diff --git a/PRODUCTION_READINESS_REVIEW.md b/PRODUCTION_READINESS_REVIEW.md new file mode 100644 index 0000000..62c27c9 --- /dev/null +++ b/PRODUCTION_READINESS_REVIEW.md @@ -0,0 +1,236 @@ +# Production Readiness Review Report + +## WINDMAR Maritime Route Optimizer + +**Review Date:** 2026-01-26 +**Reviewer:** Senior Staff Engineer +**Codebase Version:** Commit `0acc1bf` + +--- + +## Executive Summary + +**Verdict: Yes‑with‑risks** + +The WINDMAR application demonstrates solid foundational engineering practices with a well-structured codebase, comprehensive CI/CD pipeline, and reasonable security controls. However, several significant risks must be addressed or explicitly accepted before production launch. + +--- + +## 1. Architecture & Stack Summary + +| Component | Technology | Notes | +|-----------|------------|-------| +| **Backend API** | FastAPI (Python 3.11) | 25+ REST endpoints, WebSocket support | +| **Frontend** | Next.js 15, React 19, TypeScript | 17 React components | +| **Database** | PostgreSQL 16 | UUID keys, JSONB metadata | +| **Cache** | Redis 7 | Rate limiting, session cache | +| **Deployment** | Docker Compose | Multi-stage builds | +| **CI/CD** | GitHub Actions | 7 jobs: tests, security, builds | +| **External APIs** | Copernicus CDS/CMEMS | Weather data with fallback | + +--- + +## 2. Scored Checklist + +| Area | Status | Evidence | Key Risks | Required Actions Before Production | +|------|--------|----------|-----------|-----------------------------------| +| **Architecture Clarity** | 🟢 Green | Clear separation: `api/`, `src/`, `frontend/`. Layered design. README explains structure. | None significant | None required | +| **Tests & CI** | 🟡 Yellow | `tests/unit/` (6 files), `tests/integration/` (2 files). CI runs pytest + lint on every push. Coverage uploaded to Codecov. | No E2E tests. Coverage threshold not enforced. Some endpoints lack tests. | Add E2E smoke tests. Enforce minimum coverage gate. | +| **Security** | 🟡 Yellow | API key auth (`api/auth.py:35-48`), bcrypt hashing, rate limiting (`api/rate_limit.py`). Pydantic input validation. Production config guards (`api/config.py:117-131`). | **CORS has wildcard** (`api/main.py:62`). Dev API key in `docker/init-db.sql:122-124`. No CSP/XSS headers. | Remove wildcard CORS. Remove dev API key from init script. Add security headers middleware. | +| **Observability** | 🟡 Yellow | Logging in 5 API modules (41 occurrences). Health endpoint (`/api/health`). Sentry DSN configurable. | No structured logging. No metrics endpoint. No request tracing/correlation IDs. | Add JSON structured logging. Implement `/api/metrics` endpoint. Add request ID middleware. | +| **Performance & Scalability** | 🟡 Yellow | Redis caching (60min TTL). DB connection pool (`database.py:16-22`: pool_size=10, max_overflow=20). Uvicorn with 4 workers. | No pagination on list endpoints. Global mutable state in `main.py:323-330`. No load tests. | Add pagination. Refactor global state. Run load tests. | +| **Deployment & Rollback** | 🟡 Yellow | Docker Compose with health checks. CI builds images. `DEPLOYMENT.md` with security checklist. Alembic configured (`alembic.ini`). | No K8s/Helm. No automated rollback. `deploy` job is placeholder. | Implement actual deployment job. Document rollback procedure. | +| **Documentation & Runbooks** | 🟡 Yellow | README, RUN.md, INSTALLATION.md, DEPLOYMENT.md. Auto-generated API docs. Security checklist in DEPLOYMENT.md. | No incident runbooks. No architecture diagrams. No on-call docs. | Create basic runbook. Add architecture diagram. | + +--- + +## 3. Critical Findings + +### 3.1 CORS Wildcard Allows Any Origin (MEDIUM-HIGH RISK) + +**Location:** `api/main.py:62-67` + +```python +app.add_middleware( + CORSMiddleware, + allow_origins=["http://localhost:3000", "http://localhost:3001", "*"], # <-- WILDCARD + allow_credentials=True, + ... +) +``` + +The wildcard `"*"` combined with `allow_credentials=True` exposes the API to CSRF-like attacks from any origin. + +**Recommendation:** Remove `"*"` and use only specific origins from environment configuration. + +### 3.2 Development API Key in Production Init Script (MEDIUM RISK) + +**Location:** `docker/init-db.sql:120-124` + +```sql +-- Insert a default API key for development (hash of "dev_api_key_12345") +-- DO NOT USE IN PRODUCTION +INSERT INTO api_keys (key_hash, name, metadata) VALUES + ('$2b$12$rI8gXH9G0KWj5hLqz...', 'Development Key', ...) +``` + +This key will be created in production databases, potentially allowing unauthorized access. + +**Recommendation:** Remove this INSERT or move to a separate dev-only seed script. + +### 3.3 Global Mutable State (MEDIUM RISK) + +**Location:** `api/main.py:323-330` + +```python +current_vessel_specs = VesselSpecs() +current_vessel_model = VesselModel(specs=current_vessel_specs) +voyage_calculator = VoyageCalculator(vessel_model=current_vessel_model) +route_optimizer = RouteOptimizer(vessel_model=current_vessel_model) +``` + +Global mutable state with `global` keyword usage across endpoints can cause race conditions under concurrent load. + +**Recommendation:** Refactor to use dependency injection or request-scoped instances. + +### 3.4 No E2E/Smoke Tests (LOW-MEDIUM RISK) + +**Evidence:** No Playwright, Cypress, or Selenium configuration found. Docker integration test only checks health endpoints. + +**Recommendation:** Add at least 3-5 critical path E2E tests covering route optimization workflow. + +--- + +## 4. Positive Observations + +1. **Strong Input Validation**: Comprehensive validation module (`src/validation.py`) with clear error messages and tested thoroughly (`tests/unit/test_validation.py`) + +2. **Security Best Practices in Place**: + - API keys hashed with bcrypt (configurable rounds) + - Production config refuses to start with default secrets (`api/config.py:118-131`) + - Rate limiting implemented and configurable + +3. **Robust CI Pipeline**: 7 distinct jobs including security scanning (Trivy, Safety), code quality (Black, flake8, pylint, radon), and multi-service integration tests + +4. **Graceful Degradation**: Weather data falls back to synthetic provider when Copernicus is unavailable (`api/main.py:395-399`) + +5. **Health Checks Everywhere**: Docker Compose services have health checks; API has dedicated health endpoint + +--- + +## 5. Prioritized Actions Before Production Launch + +| Priority | Action | Effort | Risk Addressed | +|----------|--------|--------|----------------| +| **P0** | Remove CORS wildcard from `api/main.py:62` | 5 min | Security | +| **P0** | Remove/move dev API key INSERT from `docker/init-db.sql:122-124` | 10 min | Security | +| **P1** | Add security headers middleware (CSP, X-Frame-Options, etc.) | 1 hour | Security | +| **P1** | Implement actual deployment job in CI | 2-4 hours | Deployment | +| **P2** | Add pagination to list endpoints (`/api/routes`, `/api/vessels`) | 2 hours | Performance | +| **P2** | Add structured JSON logging | 1-2 hours | Observability | +| **P2** | Create basic incident runbook | 2 hours | Operations | +| **P3** | Refactor global state to dependency injection | 4-8 hours | Performance/Reliability | +| **P3** | Add E2E smoke tests | 4-8 hours | Quality | +| **P3** | Add request ID/correlation tracing | 2-4 hours | Observability | + +--- + +## 6. Deployment Readiness Checklist + +Before launch, verify: + +- [ ] `API_SECRET_KEY` changed from default +- [ ] `AUTH_ENABLED=true` in production +- [ ] CORS_ORIGINS contains only production domains (no localhost, no wildcard) +- [ ] `RATE_LIMIT_ENABLED=true` +- [ ] Dev API key removed from database +- [ ] SSL/TLS configured via reverse proxy +- [ ] Database backups scheduled +- [ ] Monitoring/alerting configured (Sentry DSN set) +- [ ] Log aggregation in place + +--- + +## 7. Final Verdict + +### **Yes‑with‑risks** + +The application is fundamentally sound and demonstrates good engineering practices. It **can** be deployed to production, provided: + +1. **P0 items are fixed** (CORS wildcard, dev API key) - estimated 15 minutes +2. **Risks are explicitly accepted** by stakeholders for P1-P3 items +3. **Limited initial exposure** - consider soft launch to subset of users while addressing remaining items + +The codebase shows production-quality patterns in authentication, validation, CI/CD, and deployment configuration. The identified issues are addressable and do not indicate systemic problems with the codebase architecture. + +--- + +## Appendix A: Files Reviewed + +### Core API Files +- `api/main.py` - FastAPI application (1,726 lines) +- `api/auth.py` - Authentication module +- `api/config.py` - Configuration management +- `api/database.py` - Database connection +- `api/rate_limit.py` - Rate limiting + +### Source Modules +- `src/validation.py` - Input validation +- `src/optimization/` - Route optimization engine + +### Configuration +- `docker-compose.yml` - Container orchestration +- `Dockerfile` - Backend container +- `frontend/Dockerfile` - Frontend container +- `.github/workflows/ci.yml` - CI/CD pipeline +- `docker/init-db.sql` - Database initialization + +### Tests +- `tests/unit/` - 6 unit test files +- `tests/integration/` - 2 integration test files + +### Documentation +- `README.md` +- `DEPLOYMENT.md` +- `RUN.md` +- `INSTALLATION.md` + +--- + +## Appendix B: Security Hardening Recommendations + +### Immediate (P0) + +```python +# api/main.py - Replace lines 61-67 with: +app.add_middleware( + CORSMiddleware, + allow_origins=settings.cors_origins_list, # Use environment config + allow_credentials=True, + allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"], + allow_headers=["*"], +) +``` + +### Short-term (P1) + +Add security headers middleware: + +```python +from starlette.middleware.base import BaseHTTPMiddleware + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request, call_next): + response = await call_next(request) + response.headers["X-Content-Type-Options"] = "nosniff" + response.headers["X-Frame-Options"] = "DENY" + response.headers["X-XSS-Protection"] = "1; mode=block" + response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin" + return response + +app.add_middleware(SecurityHeadersMiddleware) +``` + +--- + +*Report generated by Production Readiness Review process* diff --git a/api/cli.py b/api/cli.py new file mode 100644 index 0000000..9129b5f --- /dev/null +++ b/api/cli.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python3 +""" +WINDMAR API CLI Tool. + +Command-line interface for administrative tasks: +- API key management (create, list, revoke) +- Database operations +- Health checks + +Usage: + python -m api.cli create-api-key --name "My App" + python -m api.cli list-api-keys + python -m api.cli revoke-api-key --id + python -m api.cli check-health +""" +import argparse +import sys +from datetime import datetime, timedelta +from typing import Optional + +# Ensure imports work +from pathlib import Path +sys.path.insert(0, str(Path(__file__).parent.parent)) + + +def create_api_key( + name: str, + rate_limit: int = 1000, + expires_days: Optional[int] = None +) -> None: + """Create a new API key.""" + from api.database import get_db_context + from api.auth import create_api_key_in_db + + expires_at = None + if expires_days: + expires_at = datetime.utcnow() + timedelta(days=expires_days) + + with get_db_context() as db: + plain_key, api_key_obj = create_api_key_in_db( + db=db, + name=name, + rate_limit=rate_limit, + expires_at=expires_at, + ) + + print("\n" + "=" * 60) + print("API KEY CREATED SUCCESSFULLY") + print("=" * 60) + print(f"\nName: {name}") + print(f"Key ID: {api_key_obj.id}") + print(f"Rate Limit: {rate_limit} requests/hour") + if expires_at: + print(f"Expires: {expires_at.isoformat()}") + else: + print("Expires: Never") + print(f"\n{'*' * 60}") + print(f"API KEY: {plain_key}") + print(f"{'*' * 60}") + print("\nSAVE THIS KEY NOW - IT CANNOT BE RETRIEVED LATER!") + print("=" * 60 + "\n") + + +def list_api_keys() -> None: + """List all API keys.""" + from api.database import get_db_context + from api.models import APIKey + + with get_db_context() as db: + keys = db.query(APIKey).order_by(APIKey.created_at.desc()).all() + + if not keys: + print("\nNo API keys found.") + return + + print("\n" + "=" * 80) + print("API KEYS") + print("=" * 80) + print(f"{'ID':<36} {'Name':<20} {'Active':<8} {'Rate Limit':<12} {'Last Used':<20}") + print("-" * 80) + + for key in keys: + last_used = key.last_used_at.strftime("%Y-%m-%d %H:%M") if key.last_used_at else "Never" + print( + f"{str(key.id):<36} " + f"{key.name[:18]:<20} " + f"{'Yes' if key.is_active else 'No':<8} " + f"{key.rate_limit:<12} " + f"{last_used:<20}" + ) + + print("=" * 80) + print(f"Total: {len(keys)} key(s)\n") + + +def revoke_api_key(key_id: str) -> None: + """Revoke an API key.""" + from api.database import get_db_context + from api.auth import revoke_api_key as revoke_key + + with get_db_context() as db: + success = revoke_key(db, key_id) + + if success: + print(f"\nAPI key {key_id} has been revoked.") + else: + print(f"\nError: API key {key_id} not found.") + sys.exit(1) + + +def check_health() -> None: + """Check API health.""" + import requests + + url = "http://localhost:8000/api/health" + try: + response = requests.get(url, timeout=5) + if response.status_code == 200: + data = response.json() + print(f"\nAPI Status: {data.get('status', 'unknown')}") + print(f"Version: {data.get('version', 'unknown')}") + print(f"Timestamp: {data.get('timestamp', 'unknown')}") + else: + print(f"\nAPI returned status code: {response.status_code}") + sys.exit(1) + except requests.exceptions.ConnectionError: + print("\nError: Could not connect to API. Is the server running?") + sys.exit(1) + except Exception as e: + print(f"\nError: {e}") + sys.exit(1) + + +def init_db() -> None: + """Initialize the database.""" + from api.database import init_db as do_init + + print("Initializing database...") + do_init() + print("Database initialized successfully.") + + +def main(): + parser = argparse.ArgumentParser( + description="WINDMAR API CLI Tool", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + Create an API key: + python -m api.cli create-api-key --name "Production App" + + Create an API key that expires in 90 days: + python -m api.cli create-api-key --name "Trial Key" --expires-days 90 + + List all API keys: + python -m api.cli list-api-keys + + Revoke an API key: + python -m api.cli revoke-api-key --id 12345678-1234-1234-1234-123456789abc + + Check API health: + python -m api.cli check-health + + Initialize database: + python -m api.cli init-db + """ + ) + + subparsers = parser.add_subparsers(dest="command", help="Available commands") + + # create-api-key + create_parser = subparsers.add_parser("create-api-key", help="Create a new API key") + create_parser.add_argument("--name", required=True, help="Name for the API key") + create_parser.add_argument( + "--rate-limit", + type=int, + default=1000, + help="Rate limit (requests per hour, default: 1000)" + ) + create_parser.add_argument( + "--expires-days", + type=int, + help="Number of days until expiration (default: never)" + ) + + # list-api-keys + subparsers.add_parser("list-api-keys", help="List all API keys") + + # revoke-api-key + revoke_parser = subparsers.add_parser("revoke-api-key", help="Revoke an API key") + revoke_parser.add_argument("--id", required=True, help="UUID of the API key to revoke") + + # check-health + subparsers.add_parser("check-health", help="Check API health") + + # init-db + subparsers.add_parser("init-db", help="Initialize the database") + + args = parser.parse_args() + + if args.command == "create-api-key": + create_api_key(args.name, args.rate_limit, args.expires_days) + elif args.command == "list-api-keys": + list_api_keys() + elif args.command == "revoke-api-key": + revoke_api_key(args.id) + elif args.command == "check-health": + check_health() + elif args.command == "init-db": + init_db() + else: + parser.print_help() + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/api/main.py b/api/main.py index 477e757..5135ae5 100644 --- a/api/main.py +++ b/api/main.py @@ -6,6 +6,9 @@ - Route management (waypoints, RTZ import) - Voyage calculation (per-leg SOG, ETA, fuel) - Vessel configuration + +Version: 2.1.0 +License: Commercial - See LICENSE file """ import io @@ -16,8 +19,9 @@ from typing import Dict, List, Optional, Tuple import numpy as np -from fastapi import FastAPI, HTTPException, UploadFile, File, Query +from fastapi import FastAPI, HTTPException, UploadFile, File, Query, Response from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import PlainTextResponse from pydantic import BaseModel, Field import uvicorn @@ -42,29 +46,98 @@ from src.data.regulatory_zones import ( get_zone_checker, Zone, ZoneProperties, ZoneType, ZoneInteraction ) +from api.config import settings +from api.middleware import ( + setup_middleware, + metrics_collector, + structured_logger, + get_request_id, +) -# Configure logging -logging.basicConfig(level=logging.INFO) +# Configure structured logging for production +logging.basicConfig( + level=getattr(logging, settings.log_level.upper(), logging.INFO), + format='%(message)s', # JSON logs are self-contained +) logger = logging.getLogger(__name__) -# Initialize FastAPI app -app = FastAPI( - title="WINDMAR API", - description="Maritime Route Optimization API - Weather, Routes, Voyage Planning", - version="2.0.0", - docs_url="/api/docs", - redoc_url="/api/redoc", -) +# ============================================================================= +# Application Factory +# ============================================================================= + +def create_app() -> FastAPI: + """ + Application factory for WINDMAR API. + + Creates and configures the FastAPI application with all middleware, + routes, and dependencies. Supports both production and development modes. + + Returns: + FastAPI: Configured application instance + """ + application = FastAPI( + title="WINDMAR API", + description=""" +## Maritime Route Optimization API + +Professional-grade API for maritime route optimization, weather routing, +and voyage planning. + +### Features +- Real-time weather data integration (Copernicus CDS/CMEMS) +- A* pathfinding route optimization +- Vessel performance modeling with calibration +- Regulatory zone management (ECA, HRA, TSS) +- Fuel consumption prediction + +### Authentication +API key authentication required for all endpoints except health checks. +Include your API key in the `X-API-Key` header. + +### Rate Limiting +- 60 requests per minute +- 1000 requests per hour + +### Support +Contact: support@windmar.io + """, + version="2.1.0", + docs_url="/api/docs", + redoc_url="/api/redoc", + openapi_url="/api/openapi.json", + license_info={ + "name": "Commercial License", + "url": "https://windmar.io/license", + }, + contact={ + "name": "WINDMAR Support", + "url": "https://windmar.io", + "email": "support@windmar.io", + }, + ) + + # Setup production middleware (security headers, logging, metrics, etc.) + setup_middleware( + application, + debug=settings.is_development, + enable_hsts=settings.is_production, + ) + + # CORS middleware - use configured origins only (NO WILDCARDS) + application.add_middleware( + CORSMiddleware, + allow_origins=settings.cors_origins_list, + allow_credentials=True, + allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"], + allow_headers=["*"], + ) + + return application -# CORS middleware for frontend -app.add_middleware( - CORSMiddleware, - allow_origins=["http://localhost:3000", "http://localhost:3001", "*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) + +# Create the application +app = create_app() # ============================================================================ @@ -547,27 +620,72 @@ def weather_provider(lat: float, lon: float, time: datetime) -> LegWeather: # API Endpoints - Core # ============================================================================ -@app.get("/") +@app.get("/", tags=["System"]) async def root(): - """API root.""" + """ + API root endpoint. + + Returns basic API information and available endpoint categories. + """ return { "name": "WINDMAR API", - "version": "2.0.0", + "version": "2.1.0", "status": "operational", "docs": "/api/docs", "endpoints": { + "health": "/api/health", + "metrics": "/api/metrics", "weather": "/api/weather/...", "routes": "/api/routes/...", "voyage": "/api/voyage/...", "vessel": "/api/vessel/...", + "zones": "/api/zones/...", } } -@app.get("/api/health") +@app.get("/api/health", tags=["System"]) async def health_check(): - """Health check.""" - return {"status": "healthy", "timestamp": datetime.utcnow().isoformat()} + """ + Health check endpoint for load balancers and orchestrators. + + Returns: + - status: Service health status + - timestamp: Current UTC timestamp + - version: API version + - request_id: Correlation ID for tracing + """ + return { + "status": "healthy", + "timestamp": datetime.utcnow().isoformat() + "Z", + "version": "2.1.0", + "request_id": get_request_id(), + } + + +@app.get("/api/metrics", tags=["System"], response_class=PlainTextResponse) +async def get_metrics(): + """ + Prometheus-compatible metrics endpoint. + + Returns metrics in Prometheus exposition format for scraping. + Includes: + - Request counts by endpoint and status + - Request duration summaries + - Error counts + - Service uptime + """ + return metrics_collector.get_prometheus_metrics() + + +@app.get("/api/metrics/json", tags=["System"]) +async def get_metrics_json(): + """ + Metrics endpoint in JSON format. + + Alternative to Prometheus format for custom dashboards. + """ + return metrics_collector.get_metrics() @app.get("/api/data-sources") diff --git a/api/middleware.py b/api/middleware.py new file mode 100644 index 0000000..a1b1baf --- /dev/null +++ b/api/middleware.py @@ -0,0 +1,437 @@ +""" +Production-grade middleware for WINDMAR API. + +Provides: +- Security headers (CSP, XSS protection, etc.) +- Request ID tracking for distributed tracing +- Structured logging with correlation IDs +- Request/response timing metrics +- Error handling and sanitization +""" +import time +import uuid +import logging +import json +from typing import Callable, Optional +from contextvars import ContextVar +from datetime import datetime + +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.responses import Response, JSONResponse +from fastapi import FastAPI + +# Context variable for request ID (thread-safe) +request_id_ctx: ContextVar[Optional[str]] = ContextVar("request_id", default=None) + + +def get_request_id() -> Optional[str]: + """Get the current request ID from context.""" + return request_id_ctx.get() + + +class StructuredLogger: + """ + Structured JSON logger for production environments. + + Outputs logs in JSON format with consistent fields for log aggregation + systems like ELK, Datadog, or CloudWatch. + """ + + def __init__(self, name: str): + self.logger = logging.getLogger(name) + + def _log(self, level: str, message: str, **kwargs): + """Internal log method with structured output.""" + log_entry = { + "timestamp": datetime.utcnow().isoformat() + "Z", + "level": level, + "message": message, + "service": "windmar-api", + "request_id": get_request_id(), + **kwargs + } + + # Remove None values + log_entry = {k: v for k, v in log_entry.items() if v is not None} + + getattr(self.logger, level.lower())(json.dumps(log_entry)) + + def info(self, message: str, **kwargs): + self._log("INFO", message, **kwargs) + + def warning(self, message: str, **kwargs): + self._log("WARNING", message, **kwargs) + + def error(self, message: str, **kwargs): + self._log("ERROR", message, **kwargs) + + def debug(self, message: str, **kwargs): + self._log("DEBUG", message, **kwargs) + + +# Global structured logger instance +structured_logger = StructuredLogger("windmar") + + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + """ + Adds security headers to all responses. + + Headers added: + - X-Content-Type-Options: Prevents MIME type sniffing + - X-Frame-Options: Prevents clickjacking + - X-XSS-Protection: Enables XSS filtering + - Referrer-Policy: Controls referrer information + - Permissions-Policy: Restricts browser features + - Content-Security-Policy: Controls resource loading + - Strict-Transport-Security: Enforces HTTPS (when enabled) + """ + + def __init__(self, app, enable_hsts: bool = False): + super().__init__(app) + self.enable_hsts = enable_hsts + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + response = await call_next(request) + + # Prevent MIME type sniffing + response.headers["X-Content-Type-Options"] = "nosniff" + + # Prevent clickjacking + response.headers["X-Frame-Options"] = "DENY" + + # Enable XSS filtering + response.headers["X-XSS-Protection"] = "1; mode=block" + + # Control referrer information + response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin" + + # Restrict browser features + response.headers["Permissions-Policy"] = ( + "accelerometer=(), camera=(), geolocation=(), gyroscope=(), " + "magnetometer=(), microphone=(), payment=(), usb=()" + ) + + # Content Security Policy + response.headers["Content-Security-Policy"] = ( + "default-src 'self'; " + "script-src 'self' 'unsafe-inline' 'unsafe-eval'; " + "style-src 'self' 'unsafe-inline'; " + "img-src 'self' data: https:; " + "font-src 'self' data:; " + "connect-src 'self' https:; " + "frame-ancestors 'none';" + ) + + # HSTS - only enable in production with HTTPS + if self.enable_hsts: + response.headers["Strict-Transport-Security"] = ( + "max-age=31536000; includeSubDomains; preload" + ) + + return response + + +class RequestIdMiddleware(BaseHTTPMiddleware): + """ + Adds unique request ID to each request for distributed tracing. + + The request ID is: + - Generated as a UUID4 if not provided + - Accepted from X-Request-ID header if provided (for tracing across services) + - Added to response headers for client correlation + - Available via get_request_id() for logging + """ + + HEADER_NAME = "X-Request-ID" + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + # Get or generate request ID + request_id = request.headers.get(self.HEADER_NAME) or str(uuid.uuid4()) + + # Set in context for access throughout request lifecycle + token = request_id_ctx.set(request_id) + + try: + response = await call_next(request) + response.headers[self.HEADER_NAME] = request_id + return response + finally: + request_id_ctx.reset(token) + + +class RequestLoggingMiddleware(BaseHTTPMiddleware): + """ + Logs all requests with timing and metadata. + + Logs include: + - Method, path, query parameters + - Response status code + - Request duration in milliseconds + - Client IP address + - User agent + """ + + # Paths to exclude from logging (health checks, metrics) + EXCLUDED_PATHS = {"/api/health", "/api/metrics", "/health", "/metrics"} + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + # Skip logging for health checks + if request.url.path in self.EXCLUDED_PATHS: + return await call_next(request) + + start_time = time.perf_counter() + + # Get client info + client_ip = request.client.host if request.client else "unknown" + user_agent = request.headers.get("user-agent", "unknown") + + try: + response = await call_next(request) + duration_ms = (time.perf_counter() - start_time) * 1000 + + structured_logger.info( + "Request completed", + method=request.method, + path=request.url.path, + query=str(request.query_params) if request.query_params else None, + status_code=response.status_code, + duration_ms=round(duration_ms, 2), + client_ip=client_ip, + user_agent=user_agent[:100] if user_agent else None, + ) + + return response + + except Exception as e: + duration_ms = (time.perf_counter() - start_time) * 1000 + + structured_logger.error( + "Request failed", + method=request.method, + path=request.url.path, + error=str(e), + error_type=type(e).__name__, + duration_ms=round(duration_ms, 2), + client_ip=client_ip, + ) + raise + + +class ErrorHandlingMiddleware(BaseHTTPMiddleware): + """ + Global error handling with sanitized responses. + + In production: + - Internal errors return generic messages (no stack traces) + - All errors are logged with full details + - Error responses include request ID for support + + In development: + - Full error details are returned + """ + + def __init__(self, app, debug: bool = False): + super().__init__(app) + self.debug = debug + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + try: + return await call_next(request) + except Exception as e: + request_id = get_request_id() + + # Log the full error + structured_logger.error( + "Unhandled exception", + error=str(e), + error_type=type(e).__name__, + path=request.url.path, + method=request.method, + ) + + # Return sanitized error response + if self.debug: + detail = str(e) + else: + detail = "An internal error occurred. Please contact support with the request ID." + + return JSONResponse( + status_code=500, + content={ + "error": "Internal Server Error", + "detail": detail, + "request_id": request_id, + }, + headers={"X-Request-ID": request_id} if request_id else {}, + ) + + +# Metrics storage for Prometheus-style metrics +class MetricsCollector: + """ + Simple in-memory metrics collector for request statistics. + + Collects: + - Request counts by method, path, status + - Request duration histograms + - Error counts + """ + + def __init__(self): + self.request_count: dict = {} + self.request_duration_sum: dict = {} + self.request_duration_count: dict = {} + self.error_count: dict = {} + self.start_time = datetime.utcnow() + + def record_request( + self, + method: str, + path: str, + status_code: int, + duration_seconds: float + ): + """Record a completed request.""" + # Normalize path (remove IDs for aggregation) + normalized_path = self._normalize_path(path) + key = f"{method}:{normalized_path}:{status_code}" + + self.request_count[key] = self.request_count.get(key, 0) + 1 + self.request_duration_sum[key] = ( + self.request_duration_sum.get(key, 0) + duration_seconds + ) + self.request_duration_count[key] = ( + self.request_duration_count.get(key, 0) + 1 + ) + + if status_code >= 500: + error_key = f"{method}:{normalized_path}" + self.error_count[error_key] = self.error_count.get(error_key, 0) + 1 + + def _normalize_path(self, path: str) -> str: + """Normalize path by replacing UUIDs and IDs with placeholders.""" + import re + # Replace UUIDs + path = re.sub( + r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}', + '{id}', + path, + flags=re.IGNORECASE + ) + # Replace numeric IDs + path = re.sub(r'/\d+(?=/|$)', '/{id}', path) + return path + + def get_metrics(self) -> dict: + """Get all metrics as a dictionary.""" + uptime = (datetime.utcnow() - self.start_time).total_seconds() + + return { + "uptime_seconds": uptime, + "requests": { + "total": sum(self.request_count.values()), + "by_endpoint": self.request_count, + }, + "latency": { + "sum_seconds": self.request_duration_sum, + "count": self.request_duration_count, + }, + "errors": { + "total": sum(self.error_count.values()), + "by_endpoint": self.error_count, + }, + } + + def get_prometheus_metrics(self) -> str: + """Get metrics in Prometheus exposition format.""" + lines = [] + + # Uptime + uptime = (datetime.utcnow() - self.start_time).total_seconds() + lines.append(f"# HELP windmar_uptime_seconds Time since service start") + lines.append(f"# TYPE windmar_uptime_seconds gauge") + lines.append(f"windmar_uptime_seconds {uptime}") + + # Request count + lines.append(f"# HELP windmar_requests_total Total request count") + lines.append(f"# TYPE windmar_requests_total counter") + for key, count in self.request_count.items(): + method, path, status = key.split(":") + lines.append( + f'windmar_requests_total{{method="{method}",path="{path}",status="{status}"}} {count}' + ) + + # Request duration + lines.append(f"# HELP windmar_request_duration_seconds Request duration") + lines.append(f"# TYPE windmar_request_duration_seconds summary") + for key, total in self.request_duration_sum.items(): + method, path, status = key.split(":") + count = self.request_duration_count.get(key, 1) + avg = total / count if count > 0 else 0 + lines.append( + f'windmar_request_duration_seconds_sum{{method="{method}",path="{path}",status="{status}"}} {total}' + ) + lines.append( + f'windmar_request_duration_seconds_count{{method="{method}",path="{path}",status="{status}"}} {count}' + ) + + # Errors + lines.append(f"# HELP windmar_errors_total Total error count") + lines.append(f"# TYPE windmar_errors_total counter") + for key, count in self.error_count.items(): + method, path = key.split(":") + lines.append( + f'windmar_errors_total{{method="{method}",path="{path}"}} {count}' + ) + + return "\n".join(lines) + + +# Global metrics collector +metrics_collector = MetricsCollector() + + +class MetricsMiddleware(BaseHTTPMiddleware): + """ + Collects request metrics for monitoring. + """ + + EXCLUDED_PATHS = {"/api/health", "/api/metrics", "/health", "/metrics"} + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + if request.url.path in self.EXCLUDED_PATHS: + return await call_next(request) + + start_time = time.perf_counter() + response = await call_next(request) + duration = time.perf_counter() - start_time + + metrics_collector.record_request( + method=request.method, + path=request.url.path, + status_code=response.status_code, + duration_seconds=duration, + ) + + return response + + +def setup_middleware(app: FastAPI, debug: bool = False, enable_hsts: bool = False): + """ + Configure all production middleware for the application. + + Args: + app: FastAPI application instance + debug: Enable debug mode (detailed error messages) + enable_hsts: Enable HSTS header (requires HTTPS) + + Order matters! Middleware is executed in reverse order of addition. + """ + # These are added last but execute first + app.add_middleware(MetricsMiddleware) + app.add_middleware(RequestLoggingMiddleware) + app.add_middleware(RequestIdMiddleware) + app.add_middleware(SecurityHeadersMiddleware, enable_hsts=enable_hsts) + app.add_middleware(ErrorHandlingMiddleware, debug=debug) diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml new file mode 100644 index 0000000..36ae442 --- /dev/null +++ b/docker-compose.prod.yml @@ -0,0 +1,129 @@ +# ============================================================================= +# WINDMAR Production Docker Compose Configuration +# ============================================================================= +# +# This file extends docker-compose.yml with production-specific settings. +# +# Usage: +# docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d +# +# Or set COMPOSE_FILE environment variable: +# export COMPOSE_FILE=docker-compose.yml:docker-compose.prod.yml +# docker-compose up -d +# +# ============================================================================= + +version: '3.8' + +services: + db: + # Production database settings + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 512M + environment: + # Enforce password requirement + POSTGRES_HOST_AUTH_METHOD: scram-sha-256 + # Don't expose ports externally in production + ports: [] + logging: + driver: "json-file" + options: + max-size: "100m" + max-file: "5" + + redis: + # Production Redis settings + command: > + redis-server + --appendonly yes + --requirepass ${REDIS_PASSWORD} + --maxmemory 256mb + --maxmemory-policy allkeys-lru + deploy: + resources: + limits: + memory: 512M + reservations: + memory: 128M + # Don't expose ports externally in production + ports: [] + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "3" + + api: + # Production API settings + environment: + ENVIRONMENT: production + LOG_LEVEL: info + AUTH_ENABLED: "true" + RATE_LIMIT_ENABLED: "true" + deploy: + resources: + limits: + memory: 4G + reservations: + memory: 1G + replicas: 2 + logging: + driver: "json-file" + options: + max-size: "100m" + max-file: "10" + # Production health check with stricter settings + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/api/health"] + interval: 15s + timeout: 5s + retries: 5 + start_period: 60s + + frontend: + # Production frontend settings + deploy: + resources: + limits: + memory: 1G + reservations: + memory: 256M + logging: + driver: "json-file" + options: + max-size: "50m" + max-file: "5" + + # Optional: Nginx reverse proxy for SSL termination + nginx: + image: nginx:alpine + container_name: windmar-nginx + ports: + - "80:80" + - "443:443" + volumes: + - ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro + - ./docker/nginx/ssl:/etc/nginx/ssl:ro + depends_on: + - api + - frontend + networks: + - windmar-network + restart: unless-stopped + profiles: + - with-nginx + +# Production volumes with backup-friendly naming +volumes: + postgres_data: + name: windmar_postgres_data + redis_data: + name: windmar_redis_data + +networks: + windmar-network: + name: windmar_network diff --git a/docker/init-db.sql b/docker/init-db.sql index dcf1dff..3807aea 100644 --- a/docker/init-db.sql +++ b/docker/init-db.sql @@ -117,8 +117,12 @@ $$ language 'plpgsql'; CREATE TRIGGER update_vessel_specs_updated_at BEFORE UPDATE ON vessel_specs FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); --- Insert a default API key for development (hash of "dev_api_key_12345") --- DO NOT USE IN PRODUCTION -INSERT INTO api_keys (key_hash, name, metadata) VALUES - ('$2b$12$rI8gXH9G0KWj5hLqz.4g3O8QN9X7J5Y6Q8Z1A2B3C4D5E6F7G8H9I0J', 'Development Key', '{"description": "Default development API key. Remove in production!"}') -ON CONFLICT (key_hash) DO NOTHING; +-- ============================================================================ +-- PRODUCTION SECURITY NOTICE +-- ============================================================================ +-- API keys must be created manually after deployment using the CLI tool: +-- +-- docker-compose exec api python -m api.cli create-api-key --name "Production Key" +-- +-- NEVER commit API keys to version control or seed them in init scripts. +-- ============================================================================