diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..03a268b8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,34 @@ +# Include any files or directories that you don't want to be copied to your +# container here (e.g., local build artifacts, temporary files, etc.). +# +# For more help, visit the .dockerignore file reference guide at +# https://docs.docker.com/go/build-context-dockerignore/ + +**/.DS_Store +**/__pycache__ +**/.venv +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/bin +**/charts +**/docker-compose* +**/compose.y*ml +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md diff --git a/.env.example b/.env.example index ecfce9e7..4411d03a 100644 --- a/.env.example +++ b/.env.example @@ -2,4 +2,37 @@ ENV='DEVELOPMENT' SECRET_KEY='unique-secret' ALLOWED_HOSTS='localhost,127.0.0.1' MONGODB_URI='mongodb://localhost:27017' -DB_NAME='todo-app' \ No newline at end of file +DB_NAME='db-name' +# GOOGLE OAUTH SETTINGS +GOOGLE_OAUTH_CLIENT_ID="google-client-id" +GOOGLE_OAUTH_CLIENT_SECRET="client-secret" +GOOGLE_OAUTH_REDIRECT_URI="http://localhost:8000/v1/auth/google/callback" + +PRIVATE_KEY="generate keys and paste here" +PUBLIC_KEY="generate keys and paste here" + +ACCESS_LIFETIME=3600 +REFRESH_LIFETIME=604800 + +ACCESS_TOKEN_COOKIE_NAME='todo-access' +REFRESH_TOKEN_COOKIE_NAME='todo-refresh' +COOKIE_DOMAIN='localhost' +COOKIE_SECURE='true' +COOKIE_HTTPONLY=True +COOKIE_SAMESITE='Strict' + +TODO_UI_BASE_URL='http://localhost:3000' +TODO_UI_REDIRECT_PATH='dashboard' +TODO_BACKEND_BASE_URL='http://localhost:8000' + +CORS_ALLOWED_ORIGINS='http://localhost:3000,http://localhost:8000' + +SWAGGER_UI_PATH='/api/schema' + +ADMIN_EMAILS = "admin@gmail.com,admin2@gmail.com" + +POSTGRES_DB=todo_postgres +POSTGRES_HOST=postgres +POSTGRES_PASSWORD=todo_password +POSTGRES_PORT=5432 +POSTGRES_USER=todo_user \ No newline at end of file diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 00000000..a5ffb769 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,89 @@ +name: Deploy to EC2 + +on: + push: + branches: + - main + - develop + +jobs: + build-and-push: + runs-on: ubuntu-latest + timeout-minutes: 10 + environment: ${{ github.ref == 'refs/heads/main' && 'production' || 'staging' }} + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: . + file: production.Dockerfile + platforms: linux/arm64 + push: true + build-args: | + ENV=${{ vars.ENV }} + tags: | + ${{ secrets.DOCKERHUB_USERNAME }}/${{ github.event.repository.name }}:${{ github.sha }} + ${{ secrets.DOCKERHUB_USERNAME }}/${{ github.event.repository.name }}:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Deploy to EC2 + uses: appleboy/ssh-action@v1 + with: + host: ${{ secrets.AWS_EC2_HOST }} + username: ${{ secrets.AWS_EC2_USERNAME }} + key: ${{ secrets.AWS_EC2_SSH_PRIVATE_KEY }} + script: | + docker pull ${{ secrets.DOCKERHUB_USERNAME }}/${{ github.event.repository.name }}:latest + docker stop ${{ github.event.repository.name }}-${{ vars.ENV }} || true + docker rm ${{ github.event.repository.name }}-${{ vars.ENV }} || true + docker run -d -p ${{ vars.PORT }}:8000 \ + --name ${{ github.event.repository.name }}-${{ vars.ENV }} \ + --network=${{ vars.DOCKER_NETWORK }} \ + -e ENV="${{ vars.ENV }}" \ + -e SECRET_KEY="${{ secrets.SECRET_KEY }}" \ + -e DB_NAME="${{ secrets.DB_NAME }}" \ + -e MONGODB_URI="${{ secrets.MONGODB_URI }}" \ + -e ALLOWED_HOSTS="${{ vars.ALLOWED_HOSTS }}" \ + -e GOOGLE_OAUTH_CLIENT_ID="${{ secrets.GOOGLE_OAUTH_CLIENT_ID }}" \ + -e GOOGLE_OAUTH_CLIENT_SECRET="${{ secrets.GOOGLE_OAUTH_CLIENT_SECRET }}" \ + -e GOOGLE_OAUTH_REDIRECT_URI="${{ vars.GOOGLE_OAUTH_REDIRECT_URI }}" \ + -e PUBLIC_KEY="${{ secrets.PUBLIC_KEY }}" \ + -e PRIVATE_KEY="${{ secrets.PRIVATE_KEY }}" \ + -e ACCESS_LIFETIME="${{ vars.ACCESS_LIFETIME }}" \ + -e REFRESH_LIFETIME="${{ vars.REFRESH_LIFETIME }}" \ + -e ACCESS_TOKEN_COOKIE_NAME="${{ vars.ACCESS_TOKEN_COOKIE_NAME }}" \ + -e REFRESH_TOKEN_COOKIE_NAME="${{ vars.REFRESH_TOKEN_COOKIE_NAME }}" \ + -e COOKIE_DOMAIN="${{ vars.COOKIE_DOMAIN }}" \ + -e COOKIE_SECURE="${{ vars.COOKIE_SECURE }}" \ + -e COOKIE_HTTPONLY="${{ vars.COOKIE_HTTPONLY }}" \ + -e COOKIE_SAMESITE="${{ vars.COOKIE_SAMESITE }}" \ + -e TODO_BACKEND_BASE_URL="${{ vars.TODO_BACKEND_BASE_URL }}" \ + -e TODO_UI_BASE_URL="${{ vars.TODO_UI_BASE_URL }}" \ + -e TODO_UI_REDIRECT_PATH="${{ vars.TODO_UI_REDIRECT_PATH }}" \ + -e CORS_ALLOWED_ORIGINS="${{ vars.CORS_ALLOWED_ORIGINS }}" \ + -e SWAGGER_UI_PATH="${{ vars.SWAGGER_UI_PATH }}" \ + -e ADMIN_EMAILS="${{ vars.ADMIN_EMAILS }}" \ + -e POSTGRES_HOST="${{ secrets.POSTGRES_HOST }}" \ + -e POSTGRES_PORT="${{ secrets.POSTGRES_PORT }}" \ + -e POSTGRES_DB="${{ secrets.POSTGRES_DB }}" \ + -e POSTGRES_USER="${{ secrets.POSTGRES_USER }}" \ + -e POSTGRES_PASSWORD="${{ secrets.POSTGRES_PASSWORD }}" \ + -e DUAL_WRITE_ENABLED="${{ vars.DUAL_WRITE_ENABLED }}" \ + -e DUAL_WRITE_SYNC_MODE="${{ vars.DUAL_WRITE_SYNC_MODE }}" \ + -e DUAL_WRITE_RETRY_ATTEMPTS="${{ vars.DUAL_WRITE_RETRY_ATTEMPTS }}" \ + -e DUAL_WRITE_RETRY_DELAY="${{ vars.DUAL_WRITE_RETRY_DELAY }}" \ + ${{ secrets.DOCKERHUB_USERNAME }}/${{ github.event.repository.name }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5abf49cd..986e1a37 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,15 +7,40 @@ on: jobs: build: runs-on: ubuntu-latest - services: - db: - image: mongo:latest - ports: - - 27017:27017 - + if: ${{ !contains(github.event.pull_request.title, '[skip tests]') }} + env: + SECRET_KEY: "test-secret-key" + ALLOWED_HOSTS: "localhost,127.0.0.1" MONGODB_URI: mongodb://db:27017 DB_NAME: todo-app + GOOGLE_OAUTH_CLIENT_ID: "test-client-id" + GOOGLE_OAUTH_CLIENT_SECRET: "test-client-secret" + GOOGLE_OAUTH_REDIRECT_URI: "http://localhost:8000/v1/auth/google/callback" + PRIVATE_KEY: "test-private-key" + PUBLIC_KEY: "test-public-key" + ACCESS_LIFETIME: "3600" + REFRESH_LIFETIME: "604800" + ACCESS_TOKEN_COOKIE_NAME: "todo-access" + REFRESH_TOKEN_COOKIE_NAME: "todo-refresh" + COOKIE_DOMAIN: "localhost" + COOKIE_SECURE: "False" + COOKIE_HTTPONLY: "True" + COOKIE_SAMESITE: "Lax" + TODO_UI_BASE_URL: "http://localhost:3000" + TODO_UI_REDIRECT_PATH: "dashboard" + TODO_BACKEND_BASE_URL: "http://localhost:8000" + CORS_ALLOWED_ORIGINS: "http://localhost:3000,http://localhost:8000" + ADMIN_EMAILS: "admin@example.com" + POSTGRES_HOST: "localhost" + POSTGRES_PORT: "5432" + POSTGRES_DB: "todo-app" + POSTGRES_USER: "test-user" + POSTGRES_PASSWORD: "test-password" + DUAL_WRITE_ENABLED: "True" + DUAL_WRITE_SYNC_MODE: "async" + DUAL_WRITE_RETRY_ATTEMPTS: "3" + DUAL_WRITE_RETRY_DELAY: "5" steps: - name: Checkout code @@ -24,7 +49,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11.*' + python-version: "3.11.*" - name: Install dependencies run: | @@ -34,6 +59,10 @@ jobs: run: | ruff check + - name: Format check + run: | + ruff format --check + - name: Run tests run: | - python3.11 manage.py test \ No newline at end of file + python3.11 manage.py test diff --git a/.gitignore b/.gitignore index 85412eac..363935e2 100644 --- a/.gitignore +++ b/.gitignore @@ -103,5 +103,7 @@ dmypy.json cython_debug/ .ruff_cache -mongo_data -logs \ No newline at end of file +/mongo_data +/logs + +/postgres_data \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..d670b691 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,23 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Remote Attach (Django in Docker)", + "type": "debugpy", + "request": "attach", + "connect": { + "host": "localhost", + "port": 5678 + }, + "pathMappings": [ + { + "localRoot": "${workspaceFolder}", + "remoteRoot": "/app" + } + ], + "justMyCode": false, + "django": true, + "subProcess": false + } + ] +} diff --git a/Dockerfile b/Dockerfile index 1df7d310..55e6edc8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM python:3.12-slim-bookworm # Set environment variables -ENV PYTHONUNBUFFERED 1 +ENV PYTHONUNBUFFERED=1 # Set the working directory in the container WORKDIR /app diff --git a/README.Docker.md b/README.Docker.md new file mode 100644 index 00000000..6cef3e44 --- /dev/null +++ b/README.Docker.md @@ -0,0 +1,23 @@ +# Docker Deployment Guide + +### Building and running your application + +When you're ready, start your application by running: +`docker compose up --build`. + +Your application will be available at http://localhost:8000. + +### Deploying your application to the cloud + +First, build your image, e.g.: `docker build -t myapp .`. +If your cloud uses a different CPU architecture than your development +machine (e.g., you are on a Mac M1 and your cloud provider is amd64), +you'll want to build the image for that platform, e.g.: +`docker build --platform=linux/amd64 -t myapp .`. + +Then, push it to your registry, e.g. `docker push myregistry.com/myapp`. + +Consult Docker’s [getting started guide](https://docs.docker.com/go/get-started-sharing/) for more detail on building and pushing. + +### References +* [Docker's Python guide](https://docs.docker.com/language/python/) diff --git a/README.md b/README.md index 7ca8eb4d..98d98099 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# TODO Backend +# TODO Backend - Updated ## Local development setup 1. Install pyenv @@ -36,11 +36,22 @@ ``` python -m pip install -r requirements.txt ``` -6. Create a `.env` file in the root directory, and copy the content from the `.env.example` file to it +6. Create a `.env` file for environment variables: + - Copy the example environment file: + ``` + cp .env.example .env + ``` + - Edit the `.env` file and update the values according to your setup: + - `SECRET_KEY`: Generate a unique secret key for Django + - `MONGODB_URI`: MongoDB connection string (default: `mongodb://localhost:27017`) + - `DB_NAME`: Your database name + - `GOOGLE_OAUTH_CLIENT_ID` and `GOOGLE_OAUTH_CLIENT_SECRET`: OAuth credentials for Google authentication + - `PRIVATE_KEY` and `PUBLIC_KEY`: Generate RSA key pairs for JWT token signing + - Other settings can be left as default for local development 7. Install [docker](https://docs.docker.com/get-docker/) and [docker compose](https://docs.docker.com/compose/install/) 8. Start MongoDB using docker ``` - docker-compose up -d db + docker compose up -d db ``` 9. Start the development server by running the command ``` @@ -62,7 +73,7 @@ 1. Install [docker](https://docs.docker.com/get-docker/) and [docker compose](https://docs.docker.com/compose/install/) 2. Start Django application and MongoDB using docker ``` - docker-compose up -d + docker compose up -d ``` 3. Go to http://127.0.0.1:8000/v1/health API to make sure the server it up. You should see this response ``` @@ -72,6 +83,28 @@ ``` 4. On making changes to code and saving, live reload will work in this case as well +## Database Migrations + +When making changes to Django models, you need to create and apply migrations: + +1. **Create migrations** (run this after modifying models): + ``` + python manage.py makemigrations + ``` + +2. **Apply migrations** (run this to update the database schema): + ``` + python manage.py migrate + ``` + +3. **In Docker environment:** + ``` + docker compose exec django-app python manage.py makemigrations + docker compose exec django-app python manage.py migrate + ``` + +**Note:** The docker-compose.yml automatically runs `migrate` on startup, but you must manually run `makemigrations` after model changes. + ## Command reference 1. To run the tests, run the following command ``` @@ -93,4 +126,48 @@ 5. To fix lint issues ``` ruff check --fix - ``` \ No newline at end of file + ``` + +## Debug Mode with VS Code + +### Prerequisites +- VS Code with Python extension installed +- Docker and docker-compose + +### Debug Setup + +1. **Start the application with debug mode:** + ``` + python manage.py runserver_debug 0.0.0.0:8000 + ``` + +2. **Available debug options:** + ```bash + # Basic debug mode (default debug port 5678) + python manage.py runserver_debug 0.0.0.0:8000 + + # Custom debug port + python manage.py runserver_debug 0.0.0.0:8000 --debug-port 5679 + + # Wait for debugger before starting (useful for debugging startup code) + python manage.py runserver_debug 0.0.0.0:8000 --wait-for-client + ``` + +3. **Attach VS Code debugger:** + - Press `F5` or go to `Run > Start Debugging` + - Select `Python: Remote Attach (Django in Docker)` from the dropdown + - Set breakpoints in your Python code + - Make requests to trigger the breakpoints + +### Debug Features +- **Debug server port**: 5678 (configurable) +- **Path mapping**: Local code mapped to container paths +- **Django mode**: Special Django debugging features enabled +- **Hot reload**: Code changes reflected immediately +- **Variable inspection**: Full debugging capabilities in VS Code + +### Troubleshooting +- If port 5678 is in use, specify a different port with `--debug-port` +- Ensure VS Code Python extension is installed +- Check that breakpoints are set in the correct files +- Verify the debug server shows "Debug server listening on port 5678" diff --git a/docker-compose.yml b/docker-compose.yml index c207d95d..3f1697ac 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,39 +1,115 @@ -version: "3" - services: django-app: build: . container_name: todo-django-app - command: python manage.py runserver 0.0.0.0:8000 + command: > + sh -c " + python manage.py migrate --noinput && + python -Xfrozen_modules=off manage.py runserver_debug 0.0.0.0:8000 --debug-port 5678 + " environment: - MONGODB_URI: mongodb://db:27017 + MONGODB_URI: mongodb://db:27017/?replicaSet=rs0 DB_NAME: todo-app + PYTHONUNBUFFERED: 1 + PYDEVD_DISABLE_FILE_VALIDATION: 1 + # PostgreSQL Configuration + POSTGRES_HOST: postgres + POSTGRES_PORT: 5432 + POSTGRES_DB: todo_postgres + POSTGRES_USER: todo_user + POSTGRES_PASSWORD: todo_password volumes: - .:/app ports: - "8000:8000" + - "5678:5678" # Debug port depends_on: - - db + db: + condition: service_started + mongo-init: + condition: service_completed_successfully + postgres: + condition: service_healthy + stdin_open: true + tty: true + + postgres: + image: postgres:17.6 + container_name: todo-postgres + environment: + POSTGRES_DB: todo_postgres + POSTGRES_USER: todo_user + POSTGRES_PASSWORD: todo_password + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: + [ + "CMD-SHELL", + "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}", + ] + interval: 10s + timeout: 5s + retries: 5 db: image: mongo:latest + command: ["--replSet", "rs0", "--bind_ip_all", "--port", "27017", "--quiet"] container_name: todo-mongo ports: - "27017:27017" volumes: - - ./mongo_data:/data/db + - mongo_data:/data/db healthcheck: - test: ["CMD", "mongosh", "--eval", "'db.runCommand({ping:1})'"] + test: + [ + "CMD", + "mongosh", + "--quiet", + "--eval", + "if (db.runCommand({ping:1}).ok) process.exit(0); else process.exit(1)", + ] interval: 10s timeout: 5s retries: 5 start_period: 15s - - #to enable replica set, requirement for enabling transactions + + # Initialize replica set - this runs once and exits + mongo-init: + image: mongo:latest + depends_on: + db: + condition: service_healthy command: > - sh -c " - mongod --replSet rs0 --bind_ip_all --logpath /var/log/mongodb.log --logappend & - sleep 5 && - mongosh --eval 'try { rs.initiate() } catch(e) { print(e) }' && - tail -f /var/log/mongodb.log + mongosh --host db:27017 --eval " + try { + rs.status(); + print('Replica set already initialized'); + } catch(e) { + print('Initializing replica set...'); + rs.initiate({ + _id: 'rs0', + members: [{ _id: 0, host: 'db:27017' }] + }); + print('Replica set initialized'); + } " + restart: "no" + + mongo-express: + image: mongo-express + container_name: todo-mongo-express + ports: + - 8081:8081 + environment: + ME_CONFIG_MONGODB_URL: mongodb://db:27017/ + ME_CONFIG_BASICAUTH: false + depends_on: + - db + - mongo-init + +volumes: + postgres_data: + mongo_data: diff --git a/docs/DUAL_WRITE_SYSTEM.md b/docs/DUAL_WRITE_SYSTEM.md new file mode 100644 index 00000000..043993d8 --- /dev/null +++ b/docs/DUAL_WRITE_SYSTEM.md @@ -0,0 +1,324 @@ +# Dual-Write System: MongoDB to Postgres + +## Overview + +The dual-write system ensures that all data written to MongoDB is also persisted in a PostgreSQL database with a well-defined schema. This system is designed to enable future migration from MongoDB to Postgres with minimal operational risk and code changes. + +## Architecture + +### Components + +1. **Postgres Models** (`todo/models/postgres/`) + - Mirror MongoDB collections with normalized schema + - Include sync metadata for tracking sync status + - Use `mongo_id` field to maintain reference to MongoDB documents + +2. **Dual-Write Service** (`todo/services/dual_write_service.py`) + - Core service for writing to both databases + - Handles data transformation between MongoDB and Postgres + - Records sync failures for alerting + +3. **Enhanced Dual-Write Service** (`todo/services/enhanced_dual_write_service.py`) + - Extends base service with batch operations + - Provides enhanced monitoring and metrics + - Supports batch operation processing + +4. **Abstract Repository Pattern** (`todo/repositories/abstract_repository.py`) + - Defines interface for data access operations + - Enables seamless switching between databases in the future + - Provides consistent API across different storage backends + +5. **Postgres Repositories** (`todo/repositories/postgres_repository.py`) + - Concrete implementations of abstract repositories + - Handle Postgres-specific operations + - Maintain compatibility with existing MongoDB repositories + +## Configuration + +### Environment Variables + +```bash +# Dual-Write Configuration +DUAL_WRITE_ENABLED=True # Enable/disable dual-write +DUAL_WRITE_RETRY_ATTEMPTS=3 # Number of retry attempts +DUAL_WRITE_RETRY_DELAY=5 # Delay between retries (seconds) + +# Postgres Configuration +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=todo_postgres +POSTGRES_USER=todo_user +POSTGRES_PASSWORD=todo_password +``` + +### Django Settings + +The system automatically configures Django to use Postgres as the primary database while maintaining MongoDB connectivity through the existing `DatabaseManager`. + +## Usage + +### Basic Usage + +```python +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + +# Initialize the service +dual_write_service = EnhancedDualWriteService() + +# Create a document (writes to both MongoDB and Postgres) +success = dual_write_service.create_document( + collection_name='users', + data=user_data, + mongo_id=str(user_id) +) + +# Update a document +success = dual_write_service.update_document( + collection_name='users', + mongo_id=str(user_id), + data=updated_data +) + +# Delete a document +success = dual_write_service.delete_document( + collection_name='users', + mongo_id=str(user_id) +) +``` + +### Batch Operations + +```python +# Perform multiple operations in batch +operations = [ + { + 'collection_name': 'users', + 'data': user_data, + 'mongo_id': str(user_id), + 'operation': 'create' + }, + { + 'collection_name': 'tasks', + 'data': task_data, + 'mongo_id': str(task_id), + 'operation': 'update' + } +] + +success = dual_write_service.batch_operations(operations) +``` + +## Data Mapping + +### MongoDB to Postgres Schema + +| MongoDB Collection | Postgres Table | Key Fields | +|-------------------|----------------|------------| +| `users` | `postgres_users` | `google_id`, `email_id`, `name` | +| `tasks` | `postgres_tasks` | `title`, `status`, `priority`, `created_by` | +| `teams` | `postgres_teams` | `name`, `invite_code`, `created_by` | +| `labels` | `postgres_labels` | `name`, `color` | +| `roles` | `postgres_roles` | `name`, `permissions` | +| `task_assignments` | `postgres_task_assignments` | `task_mongo_id`, `user_mongo_id` | +| `watchlists` | `postgres_watchlists` | `name`, `user_mongo_id` | +| `user_team_details` | `postgres_user_team_details` | `user_id`, `team_id` | +| `user_roles` | `postgres_user_roles` | `user_mongo_id`, `role_mongo_id` | +| `audit_logs` | `postgres_audit_logs` | `action`, `collection_name`, `document_id` | + +### Field Transformations + +- **ObjectId Fields**: Converted to strings (24 characters) +- **Nested Objects**: Flattened or stored in separate tables +- **Arrays**: Stored in junction tables (e.g., `PostgresTaskLabel`) +- **Timestamps**: Preserved as-is +- **Enums**: Mapped to Postgres choices + +## Sync Status Tracking + +Each Postgres record includes sync metadata: + +```python +class SyncMetadata: + sync_status: str # 'SYNCED', 'PENDING', 'FAILED' + sync_error: str # Error message if sync failed + last_sync_at: datetime # Last successful sync timestamp +``` + +## Error Handling and Alerting + +### Sync Failures + +The system automatically records sync failures: + +```python +# Get sync failures +failures = dual_write_service.get_sync_failures() + +# Get sync metrics +metrics = dual_write_service.get_sync_metrics() +``` + +### Alerting + +- **Immediate Logging**: All failures are logged with ERROR level +- **Critical Alerts**: Logged with CRITICAL level for immediate attention +- **Failure Tracking**: Maintains list of recent failures for monitoring + +### Retry Mechanism + +- **Automatic Retries**: Failed operations are automatically retried +- **Configurable Attempts**: Set via `DUAL_WRITE_RETRY_ATTEMPTS` +- **Exponential Backoff**: Delay increases between retry attempts +- **Manual Retry**: Failed operations can be manually retried + +## Monitoring and Health Checks + +### Metrics + +```python +# Get comprehensive sync metrics +metrics = dual_write_service.get_sync_metrics() + +# Check sync status of specific document +status = dual_write_service.get_sync_status('users', str(user_id)) +``` + +## Future Migration Path + +### Phase 1: Dual-Write (Current) +- All writes go to both MongoDB and Postgres +- Reads continue from MongoDB +- Postgres schema is validated and optimized + +### Phase 2: Read Migration +- Gradually shift read operations to Postgres +- Use feature flags to control read source +- Monitor performance and data consistency + +### Phase 3: Full Migration +- All operations use Postgres +- MongoDB becomes read-only backup +- Eventually decommission MongoDB + +### Code Changes Required + +The abstract repository pattern minimizes code changes: + +```python +# Current: MongoDB repository +from todo.repositories.user_repository import UserRepository +user_repo = UserRepository() + +# Future: Postgres repository (minimal code change) +from todo.repositories.postgres_repository import PostgresUserRepository +user_repo = PostgresUserRepository() + +# Same interface, different implementation +user = user_repo.get_by_email("user@example.com") +``` + +## Performance Considerations + +### Synchronous Operations +- **Pros**: Immediate consistency, simple error handling +- **Cons**: Higher latency, potential for MongoDB write failures + +### Batch Operations +- **Pros**: Reduced database round trips, better throughput +- **Cons**: Potential for partial failures + +## Security + +### Data Privacy +- All sensitive data is encrypted in transit +- Postgres connections use SSL +- Access controls are maintained across both databases + +### Audit Trail +- All operations are logged in audit logs +- Sync failures are tracked for compliance +- Data integrity is maintained through transactions + +## Testing + +### Unit Tests +- Test individual components in isolation +- Mock external dependencies +- Verify data transformation logic + +### Integration Tests +- Test end-to-end sync operations +- Verify data consistency between databases +- Test failure scenarios and recovery + +### Performance Tests +- Measure sync latency under load +- Test batch operation efficiency + +## Troubleshooting + +### Common Issues + +1. **Postgres Connection Failures** + - Check database credentials and network connectivity + - Verify Postgres service is running + - Check firewall settings + +2. **Sync Failures** + - Review sync error logs + - Check data transformation logic + - Verify Postgres schema matches expectations + +3. **Performance Issues** + - Monitor sync latency + - Optimize batch operation sizes + - Monitor database performance + +### Debug Commands + +```python +# Enable debug logging +import logging +logging.getLogger('todo.services.dual_write_service').setLevel(logging.DEBUG) + +# Check sync status +status = dual_write_service.get_sync_status('users', str(user_id)) +print(f"Sync status: {status}") + +# Get recent failures +failures = dual_write_service.get_sync_failures() +for failure in failures: + print(f"Collection: {failure['collection']}, ID: {failure['mongo_id']}") +``` + +## Deployment + +### Prerequisites +- PostgreSQL 15+ with appropriate extensions +- MongoDB 7+ (existing) +- Python 3.9+ with required packages + +### Setup Steps +1. Create Postgres database and user +2. Run Django migrations +3. Configure environment variables +4. Verify sync operations + +### Production Considerations +- Use connection pooling for Postgres +- Set up monitoring and alerting +- Implement backup and recovery procedures + +## Support and Maintenance + +### Regular Maintenance +- Monitor sync metrics and failures +- Review and optimize Postgres performance +- Update sync logic as schema evolves +- Clean up old sync failure records + +### Updates and Upgrades +- Test sync operations after schema changes +- Verify data consistency after updates +- Monitor performance impact of changes +- Update documentation as needed diff --git a/production.Dockerfile b/production.Dockerfile new file mode 100644 index 00000000..f47d8754 --- /dev/null +++ b/production.Dockerfile @@ -0,0 +1,61 @@ +# syntax=docker/dockerfile:1 + +# Comments are provided throughout this file to help you get started. +# If you need more help, visit the Dockerfile reference guide at +# https://docs.docker.com/go/dockerfile-reference/ + +# Want to help us make this template better? Share your feedback here: https://forms.gle/ybq9Krt8jtBL3iCk7 + +ARG PYTHON_VERSION=3.12.0 +FROM python:${PYTHON_VERSION}-slim AS base + +# Prevents Python from writing pyc files. +ENV PYTHONDONTWRITEBYTECODE=1 + +# Keeps Python from buffering stdout and stderr to avoid situations where +# the application crashes without emitting any logs due to buffering. +ENV PYTHONUNBUFFERED=1 + +# Set Django settings module +ARG ENV=production +ENV ENV=${ENV} +ENV DJANGO_SETTINGS_MODULE=todo_project.settings.${ENV} + +WORKDIR /app + +# Install CA certificates needed for TLS connections to MongoDB Atlas +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +# Create a non-privileged user that the app will run under. +# See https://docs.docker.com/go/dockerfile-user-best-practices/ +ARG UID=10001 +RUN adduser \ + --disabled-password \ + --gecos "" \ + --home "/nonexistent" \ + --shell "/sbin/nologin" \ + --no-create-home \ + --uid "${UID}" \ + appuser + +# Download dependencies as a separate step to take advantage of Docker's caching. +# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds. +# Leverage a bind mount to requirements.txt to avoid having to copy them into +# into this layer. +RUN --mount=type=cache,target=/root/.cache/pip \ + --mount=type=bind,source=requirements.txt,target=requirements.txt \ + python -m pip install -r requirements.txt + +# Switch to the non-privileged user to run the application. +USER appuser + +# Copy the source code into the container. +COPY . . + +# Expose the port that the application listens on. +EXPOSE 8000 + +# Run the application. +CMD ["sh", "-c", "python manage.py migrate --noinput && gunicorn todo_project.wsgi --bind 0.0.0.0:8000"] \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 1568474e..12cf6c98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,9 @@ ignore = [] fixable = ["ALL"] unfixable = [] +[tool.ruff.lint.per-file-ignores] +"todo_project/settings/*.py" = ["F403", "F405"] + [tool.ruff.format] # Like Black, use double quotes for strings. quote-style = "double" diff --git a/requirements.txt b/requirements.txt index e995bd55..15450069 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,9 +7,10 @@ Django==5.1.5 djangorestframework==3.15.2 dnspython==2.7.0 filelock==3.16.1 +gunicorn==23.0.0 identify==2.6.1 nodeenv==1.9.1 -platformdirs==4.3.6 +platformdirs==4.3.8 pydantic==2.10.1 pydantic_core==2.27.1 pymongo==4.10.1 @@ -19,3 +20,12 @@ ruff==0.7.1 sqlparse==0.5.1 typing_extensions==4.12.2 virtualenv==20.27.0 +django-cors-headers==4.7.0 +cryptography==45.0.3 +PyJWT==2.10.1 +requests==2.32.3 +email-validator==2.2.0 +testcontainers[mongodb]==4.10.0 +drf-spectacular==0.28.0 +debugpy==1.8.14 +psycopg2-binary==2.9.9 diff --git a/schema.yaml b/schema.yaml new file mode 100644 index 00000000..a32ac959 --- /dev/null +++ b/schema.yaml @@ -0,0 +1,1955 @@ +openapi: 3.0.3 +info: + title: Todo API + version: 1.0.0 + description: A comprehensive Todo API with authentication and task management + contact: + name: API Support + email: support@example.com + license: + name: MIT License + url: https://opensource.org/licenses/MIT +paths: + /v1/auth/google/callback: + get: + operationId: google_callback + description: Processes the OAuth callback from Google and creates/updates user + account + summary: Handle Google OAuth callback + parameters: + - in: query + name: code + schema: + type: string + description: Authorization code from Google + required: true + - in: query + name: error + schema: + type: string + description: Error from Google OAuth + - in: query + name: state + schema: + type: string + description: State parameter for CSRF protection + required: true + tags: + - auth + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: OAuth callback processed successfully + '400': + description: Bad request - invalid parameters + '500': + description: Internal server error + /v1/auth/google/login: + get: + operationId: google_login + description: Redirects to Google OAuth authorization URL or returns JSON response + with auth URL + summary: Initiate Google OAuth login + parameters: + - in: query + name: format + schema: + type: string + description: 'Response format: ''json'' for JSON response, otherwise redirects' + - in: query + name: redirectURL + schema: + type: string + description: URL to redirect after successful authentication + tags: + - auth + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Google OAuth URL generated successfully + '302': + description: Redirect to Google OAuth URL + /v1/auth/logout: + post: + operationId: google_logout_post + description: Logout the user by clearing authentication cookies (POST method) + summary: Logout user (POST) + tags: + - auth + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Logout successful + /v1/health: + get: + operationId: health_check + description: Check the health status of the application and its components + summary: Health check + tags: + - health + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Application is healthy + '503': + description: Application is unhealthy + /v1/labels: + get: + operationId: labels_retrieve + description: Retrieve a paginated list of labels. + tags: + - labels + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: No response body + /v1/roles: + get: + operationId: get_roles + description: Retrieve all roles with optional filtering + summary: Get all roles + parameters: + - in: query + name: is_active + schema: + type: boolean + description: Filter by active status + - in: query + name: name + schema: + type: string + description: Filter by role name + - in: query + name: scope + schema: + type: string + description: Filter by role scope (GLOBAL/TEAM) + tags: + - roles + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Roles retrieved successfully + '400': + description: Bad request + '500': + description: Internal server error + post: + operationId: create_role + description: Create a new role with the provided details + summary: Create a new role + tags: + - roles + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateRoleRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/CreateRoleRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/CreateRoleRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '201': + description: Role created successfully + '400': + description: Bad request + '409': + description: Role already exists + '500': + description: Internal server error + /v1/roles/{role_id}: + get: + operationId: get_role_by_id + description: Retrieve a single role by its unique identifier + summary: Get role by ID + parameters: + - in: path + name: role_id + schema: + type: string + description: Unique identifier of the role + required: true + tags: + - roles + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Role retrieved successfully + '404': + description: Role not found + '500': + description: Internal server error + patch: + operationId: update_role + description: Update an existing role with the provided details + summary: Update role + parameters: + - in: path + name: role_id + schema: + type: string + description: Unique identifier of the role + required: true + tags: + - roles + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedUpdateRoleRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedUpdateRoleRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedUpdateRoleRequest' + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Role updated successfully + '400': + description: Bad request + '404': + description: Role not found + '409': + description: Role name already exists + '500': + description: Internal server error + delete: + operationId: delete_role + description: Delete a role by its unique identifier + summary: Delete role + parameters: + - in: path + name: role_id + schema: + type: string + description: Unique identifier of the role to delete + required: true + tags: + - roles + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '204': + description: Role deleted successfully + '404': + description: Role not found + '500': + description: Internal server error + /v1/task-assignments: + post: + operationId: create_task_assignment + description: Assign a task to either a user or a team. The system will validate + that both the task and assignee exist before creating the assignment. + summary: Assign task to user or team + tags: + - task-assignments + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTaskAssignmentRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/CreateTaskAssignmentRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/CreateTaskAssignmentRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTaskAssignmentResponse' + description: Task assignment created successfully + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Bad request - validation error or assignee not found + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Task not found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Internal server error + /v1/task-assignments/{task_id}: + get: + operationId: get_task_assignment + description: Retrieve the assignment details for a specific task + summary: Get task assignment by task ID + parameters: + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task + required: true + tags: + - task-assignments + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTaskAssignmentResponse' + description: Task assignment retrieved successfully + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Task assignment not found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Internal server error + patch: + operationId: set_executor_for_team_task + description: Allows the SPOC of a team to set or update the executor (user within + the team) for a team-assigned task. All SPOC re-assignments are logged in + the audit trail. + summary: Set or update executor for a team-assigned task (SPOC only) + parameters: + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task + required: true + tags: + - task-assignments + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedExecutorUpdateRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedExecutorUpdateRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedExecutorUpdateRequest' + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Executor updated successfully + '403': + description: Forbidden - only SPOC can update executor for team task + '404': + description: Task assignment not found + '500': + description: Internal server error + delete: + operationId: delete_task_assignment + description: Remove the assignment for a specific task + summary: Delete task assignment + parameters: + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task + required: true + tags: + - task-assignments + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '204': + description: Task assignment deleted successfully + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Task assignment not found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Internal server error + /v1/tasks: + get: + operationId: get_tasks + description: 'Retrieve a paginated list of tasks with optional filtering and + sorting. Each task now includes an ''in_watchlist'' property indicating the + watchlist status: true if actively watched, false if in watchlist but inactive, + or null if not in watchlist.' + summary: Get paginated list of tasks + parameters: + - in: query + name: limit + schema: + type: integer + description: Number of tasks per page + - in: query + name: page + schema: + type: integer + description: Page number for pagination + - in: query + name: teamId + schema: + type: string + description: If provided, filters tasks assigned to this team. + tags: + - tasks + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/GetTasksResponse' + description: Successful response + '400': + description: Bad request + '500': + description: Internal server error + post: + operationId: create_task + description: Create a new task with the provided details + summary: Create a new task + tags: + - tasks + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTaskRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/CreateTaskRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/CreateTaskRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '201': + description: Task created successfully + '400': + description: Bad request + '500': + description: Internal server error + /v1/tasks/{task_id}: + get: + operationId: get_task_by_id + description: Retrieve a single task by its unique identifier + summary: Get task by ID + parameters: + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task + required: true + tags: + - tasks + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Task retrieved successfully + '404': + description: Task not found + '500': + description: Internal server error + patch: + operationId: update_task + description: Partially update a task or defer it based on the action parameter + summary: Update or defer task + parameters: + - in: query + name: action + schema: + type: string + description: 'Action to perform: ''update'' or ''defer''' + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task + required: true + tags: + - tasks + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedUpdateTaskRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedUpdateTaskRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedUpdateTaskRequest' + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Task updated successfully + '400': + description: Bad request + '404': + description: Task not found + '500': + description: Internal server error + delete: + operationId: delete_task + description: Delete a task by its unique identifier + summary: Delete task + parameters: + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task to delete + required: true + tags: + - tasks + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '204': + description: Task deleted successfully + '404': + description: Task not found + '500': + description: Internal server error + /v1/teams: + get: + operationId: teams_retrieve + description: Get all teams assigned to the authenticated user. + tags: + - teams + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: No response body + post: + operationId: create_team + description: Create a new team with the provided details. The creator is always + added as a member, even if not in member_ids or as POC. + summary: Create a new team + tags: + - teams + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTeamRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/CreateTeamRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/CreateTeamRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTeamResponse' + description: Team created successfully + '400': + description: Bad request - validation error + '500': + description: Internal server error + /v1/teams/{team_id}: + get: + operationId: get_team_by_id + description: Retrieve a single team by its unique identifier. Optionally, set + ?member=true to get users belonging to this team. + summary: Get team by ID + parameters: + - in: query + name: member + schema: + type: boolean + description: If true, returns users that belong to this team instead of team + details. + - in: path + name: team_id + schema: + type: string + description: Unique identifier of the team + required: true + tags: + - teams + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Team or team members retrieved successfully + '404': + description: Team not found + '500': + description: Internal server error + patch: + operationId: update_team + description: 'Update a team''s details including name, description, point of + contact (POC), and team members. All fields are optional - only include the + fields you want to update. For member management: if member_ids is provided, + it completely replaces the current team members; if member_ids is not provided, + existing members remain unchanged.' + summary: Update team by ID + parameters: + - in: path + name: team_id + schema: + type: string + description: Unique identifier of the team + required: true + tags: + - teams + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedUpdateTeamRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedUpdateTeamRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedUpdateTeamRequest' + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TeamDTO' + description: Team updated successfully + '400': + description: Bad request - validation error or invalid member IDs + '404': + description: Team not found + '500': + description: Internal server error + /v1/teams/{team_id}/members: + post: + operationId: add_team_members + description: Add new members to a team. Only existing team members can add other + members. + summary: Add members to a team + parameters: + - in: path + name: team_id + schema: + type: string + description: Unique identifier of the team + required: true + tags: + - teams + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/AddTeamMemberRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/AddTeamMemberRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/AddTeamMemberRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TeamDTO' + description: Team members added successfully + '400': + description: Bad request - validation error or user not a team member + '404': + description: Team not found + '500': + description: Internal server error + /v1/teams/join-by-invite: + post: + operationId: join_team_by_invite_code + description: Join a team using a valid invite code. Returns the joined team + details. + summary: Join a team by invite code + tags: + - teams + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/JoinTeamByInviteCodeRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/JoinTeamByInviteCodeRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/JoinTeamByInviteCodeRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TeamDTO' + description: Joined team successfully + '400': + description: Bad request - validation error or already a member + '404': + description: Team not found or invalid invite code + '500': + description: Internal server error + /v1/users: + get: + operationId: get_users + description: Get user profile details or search users with fuzzy search. Use + 'profile=true' to get current user details, or use search parameter to find + users. + summary: Get users with search and pagination + parameters: + - in: query + name: limit + schema: + type: integer + description: 'Number of results per page (default: 10, max: 100)' + - in: query + name: page + schema: + type: integer + description: 'Page number for pagination (default: 1)' + - in: query + name: profile + schema: + type: string + description: Set to 'true' to get current user profile + - in: query + name: search + schema: + type: string + description: Search query for name or email (fuzzy search) + tags: + - users + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/UserSearchResponseDTO' + description: '' + '204': + description: No users found + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: '' + '400': + description: Bad request - invalid parameters + '404': + description: Route does not exist + '500': + description: Internal server error + /v1/watchlist/tasks: + get: + operationId: get_watchlist_tasks + description: Retrieve a paginated list of tasks that are added to the authenticated + user's watchlist. + summary: Get paginated list of watchlisted tasks + parameters: + - in: query + name: limit + schema: + type: integer + description: 'Number of tasks per page (default: 10, max: 100)' + - in: query + name: page + schema: + type: integer + description: 'Page number for pagination (default: 1)' + tags: + - watchlist + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/GetWatchlistTasksResponse' + description: Paginated list of watchlisted tasks returned successfully + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Bad request - validation error + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Internal server error + post: + operationId: add_task_to_watchlist + description: Add a task to the authenticated user's watchlist. + summary: Add a task to the watchlist + tags: + - watchlist + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateWatchlistRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/CreateWatchlistRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/CreateWatchlistRequest' + required: true + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/CreateWatchlistResponse' + description: Task added to watchlist successfully + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Bad request - validation error or already in watchlist + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Internal server error + /v1/watchlist/tasks/{task_id}: + patch: + operationId: update_watchlist_task + description: Update the isActive status of a task in the authenticated user's + watchlist. This allows users to activate or deactivate watching a specific + task. + summary: Update watchlist status of a task + parameters: + - in: path + name: task_id + schema: + type: string + description: Unique identifier of the task to update in the watchlist + required: true + tags: + - watchlist + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedUpdateWatchlistRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedUpdateWatchlistRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedUpdateWatchlistRequest' + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: Watchlist task status updated successfully + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Bad request - validation error + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Task not found in watchlist + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Internal server error + /v1/watchlist/tasks/check: + get: + operationId: check_task_in_watchlist + description: 'Returns the watchlist status for the given task_id: true if actively + watched, false if in watchlist but inactive, or null if not in watchlist.' + summary: Check if a task is in the user's watchlist + parameters: + - in: query + name: task_id + schema: + type: string + description: Task ID to check + required: true + tags: + - watchlist + security: + - cookieAuth: [] + - basicAuth: [] + - {} + responses: + '200': + description: 'Returns { ''in_watchlist'': true/false/null }' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Bad request - validation error + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/ApiErrorResponse' + description: Unauthorized +components: + schemas: + AddTeamMemberRequest: + type: object + properties: + member_ids: + type: array + items: + type: string + minLength: 1 + description: List of user IDs to add to the team + minItems: 1 + required: + - member_ids + ApiErrorDetail: + properties: + source: + anyOf: + - additionalProperties: + type: string + propertyNames: + enum: + - parameter + - pointer + - header + - path + type: object + - type: 'null' + default: null + title: Source + title: + anyOf: + - type: string + - type: 'null' + default: null + title: Title + detail: + anyOf: + - type: string + - type: 'null' + default: null + title: Detail + title: ApiErrorDetail + type: object + ApiErrorResponse: + properties: + statusCode: + title: Statuscode + type: integer + message: + title: Message + type: string + errors: + items: + $ref: '#/components/schemas/ApiErrorDetail' + title: Errors + type: array + authenticated: + anyOf: + - type: boolean + - type: 'null' + default: null + title: Authenticated + required: + - statusCode + - message + - errors + title: ApiErrorResponse + type: object + AssigneeInfoDTO: + properties: + id: + title: Id + type: string + name: + title: Name + type: string + relation_type: + allOf: + - $ref: '#/components/schemas/RelationTypeEnum' + title: Relation Type + is_action_taken: + title: Is Action Taken + type: boolean + is_active: + title: Is Active + type: boolean + required: + - id + - name + - relation_type + - is_action_taken + - is_active + title: AssigneeInfoDTO + type: object + CreateRoleRequest: + type: object + properties: + name: + type: string + minLength: 1 + maxLength: 100 + description: + type: string + maxLength: 500 + scope: + allOf: + - $ref: '#/components/schemas/ScopeEnum' + default: GLOBAL + is_active: + type: boolean + default: true + required: + - name + CreateTaskAssignmentRequest: + type: object + properties: + task_id: + type: string + minLength: 1 + assignee_id: + type: string + minLength: 1 + user_type: + allOf: + - $ref: '#/components/schemas/UserTypeEnum' + description: |- + Type of assignee: 'user' or 'team' + + * `user` - user + * `team` - team + required: + - assignee_id + - task_id + - user_type + CreateTaskAssignmentResponse: + properties: + data: + $ref: '#/components/schemas/TaskAssignmentResponseDTO' + message: + default: Task assignment created successfully + title: Message + type: string + required: + - data + title: CreateTaskAssignmentResponse + type: object + CreateTaskRequest: + type: object + properties: + title: + type: string + minLength: 1 + description: + type: string + nullable: true + priority: + allOf: + - $ref: '#/components/schemas/PriorityEnum' + default: LOW + status: + allOf: + - $ref: '#/components/schemas/StatusEnum' + default: TODO + assignee: + type: object + additionalProperties: {} + nullable: true + labels: + type: array + items: + type: string + minLength: 1 + dueAt: + type: string + format: date-time + nullable: true + required: + - title + CreateTeamRequest: + type: object + description: The poc_id represents the team's point of contact and is optional. + properties: + name: + type: string + minLength: 1 + maxLength: 100 + description: + type: string + maxLength: 500 + member_ids: + type: array + items: + type: string + minLength: 1 + poc_id: + type: string + nullable: true + required: + - name + CreateTeamResponse: + description: |- + Response model for team creation endpoint. + + Attributes: + team: The newly created team details + message: Success or status message from the operation + properties: + team: + $ref: '#/components/schemas/TeamDTO' + message: + title: Message + type: string + required: + - team + - message + title: CreateTeamResponse + type: object + CreateWatchlistDTO: + properties: + taskId: + title: Taskid + type: string + userId: + title: Userid + type: string + isActive: + default: true + title: Isactive + type: boolean + createdAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Createdat + createdBy: + anyOf: + - type: string + - type: 'null' + default: null + title: Createdby + updatedAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Updatedat + updatedBy: + anyOf: + - type: string + - type: 'null' + default: null + title: Updatedby + required: + - taskId + - userId + title: CreateWatchlistDTO + type: object + CreateWatchlistRequest: + type: object + properties: + taskId: + type: string + minLength: 1 + required: + - taskId + CreateWatchlistResponse: + properties: + statusCode: + default: 201 + title: Statuscode + type: integer + successMessage: + default: Task added to watchlist successfully + title: Successmessage + type: string + data: + $ref: '#/components/schemas/CreateWatchlistDTO' + required: + - data + title: CreateWatchlistResponse + type: object + DeferredDetailsDTO: + properties: + deferredAt: + format: date-time + title: Deferredat + type: string + deferredTill: + format: date-time + title: Deferredtill + type: string + deferredBy: + $ref: '#/components/schemas/UserDTO' + required: + - deferredAt + - deferredTill + - deferredBy + title: DeferredDetailsDTO + type: object + GetTasksResponse: + properties: + links: + anyOf: + - $ref: '#/components/schemas/LinksData' + - type: 'null' + default: null + error: + anyOf: + - type: object + - type: 'null' + default: null + title: Error + tasks: + default: [] + items: + $ref: '#/components/schemas/TaskDTO' + title: Tasks + type: array + title: GetTasksResponse + type: object + GetWatchlistTasksResponse: + properties: + links: + anyOf: + - $ref: '#/components/schemas/LinksData' + - type: 'null' + default: null + error: + anyOf: + - type: object + - type: 'null' + default: null + title: Error + tasks: + default: [] + items: + $ref: '#/components/schemas/WatchlistDTO' + title: Tasks + type: array + title: GetWatchlistTasksResponse + type: object + JoinTeamByInviteCodeRequest: + type: object + properties: + invite_code: + type: string + minLength: 1 + maxLength: 100 + required: + - invite_code + LabelDTO: + properties: + id: + title: Id + type: string + name: + title: Name + type: string + color: + title: Color + type: string + createdAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Createdat + updatedAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Updatedat + createdBy: + anyOf: + - $ref: '#/components/schemas/UserDTO' + - type: 'null' + default: null + updatedBy: + anyOf: + - $ref: '#/components/schemas/UserDTO' + - type: 'null' + default: null + required: + - id + - name + - color + title: LabelDTO + type: object + LinksData: + properties: + next: + anyOf: + - type: string + - type: 'null' + default: null + title: Next + prev: + anyOf: + - type: string + - type: 'null' + default: null + title: Prev + title: LinksData + type: object + NullEnum: + enum: + - null + PatchedExecutorUpdateRequest: + type: object + properties: + executor_id: + type: string + minLength: 1 + description: User ID of the new executor (must be a member of the team) + PatchedUpdateRoleRequest: + type: object + properties: + name: + type: string + minLength: 1 + maxLength: 100 + description: + type: string + maxLength: 500 + scope: + $ref: '#/components/schemas/ScopeEnum' + is_active: + type: boolean + PatchedUpdateTaskRequest: + type: object + properties: + title: + type: string + maxLength: 255 + description: + type: string + nullable: true + priority: + nullable: true + oneOf: + - $ref: '#/components/schemas/PriorityEnum' + - $ref: '#/components/schemas/NullEnum' + status: + nullable: true + oneOf: + - $ref: '#/components/schemas/StatusEnum' + - $ref: '#/components/schemas/NullEnum' + assignee: + type: object + additionalProperties: {} + nullable: true + labels: + type: array + items: + type: string + minLength: 1 + nullable: true + dueAt: + type: string + format: date-time + nullable: true + startedAt: + type: string + format: date-time + nullable: true + isAcknowledged: + type: boolean + PatchedUpdateTeamRequest: + type: object + description: |- + Serializer for updating team details. + All fields are optional for PATCH operations. + properties: + name: + type: string + minLength: 1 + maxLength: 100 + description: + type: string + nullable: true + maxLength: 500 + poc_id: + type: string + nullable: true + minLength: 1 + member_ids: + type: array + items: + type: string + minLength: 1 + PatchedUpdateWatchlistRequest: + type: object + properties: + isActive: + type: boolean + PriorityEnum: + enum: + - HIGH + - MEDIUM + - LOW + type: string + description: |- + * `HIGH` - HIGH + * `MEDIUM` - MEDIUM + * `LOW` - LOW + RelationTypeEnum: + enum: + - team + - user + type: string + ScopeEnum: + enum: + - GLOBAL + - TEAM + type: string + description: |- + * `GLOBAL` - Global + * `TEAM` - Team + StatusEnum: + enum: + - TODO + - IN_PROGRESS + - DEFERRED + - BLOCKED + - DONE + type: string + description: |- + * `TODO` - TODO + * `IN_PROGRESS` - IN_PROGRESS + * `DEFERRED` - DEFERRED + * `BLOCKED` - BLOCKED + * `DONE` - DONE + TaskAssignmentResponseDTO: + properties: + id: + title: Id + type: string + task_id: + title: Task Id + type: string + assignee_id: + title: Assignee Id + type: string + user_type: + allOf: + - $ref: '#/components/schemas/UserTypeEnum' + title: User Type + assignee_name: + title: Assignee Name + type: string + is_active: + title: Is Active + type: boolean + created_by: + title: Created By + type: string + updated_by: + anyOf: + - type: string + - type: 'null' + default: null + title: Updated By + created_at: + format: date-time + title: Created At + type: string + updated_at: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Updated At + required: + - id + - task_id + - assignee_id + - user_type + - assignee_name + - is_active + - created_by + - created_at + title: TaskAssignmentResponseDTO + type: object + TaskDTO: + properties: + id: + title: Id + type: string + displayId: + title: Displayid + type: string + title: + title: Title + type: string + description: + anyOf: + - type: string + - type: 'null' + default: null + title: Description + priority: + anyOf: + - $ref: '#/components/schemas/TaskPriority' + - type: 'null' + default: null + status: + anyOf: + - $ref: '#/components/schemas/TaskStatus' + - type: 'null' + default: null + assignee: + anyOf: + - $ref: '#/components/schemas/AssigneeInfoDTO' + - type: 'null' + default: null + isAcknowledged: + anyOf: + - type: boolean + - type: 'null' + default: null + title: Isacknowledged + labels: + default: [] + items: + $ref: '#/components/schemas/LabelDTO' + title: Labels + type: array + startedAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Startedat + dueAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Dueat + deferredDetails: + anyOf: + - $ref: '#/components/schemas/DeferredDetailsDTO' + - type: 'null' + default: null + in_watchlist: + anyOf: + - type: boolean + - type: 'null' + default: null + title: In Watchlist + createdAt: + format: date-time + title: Createdat + type: string + updatedAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Updatedat + createdBy: + $ref: '#/components/schemas/UserDTO' + updatedBy: + anyOf: + - $ref: '#/components/schemas/UserDTO' + - type: 'null' + default: null + required: + - id + - displayId + - title + - createdAt + - createdBy + title: TaskDTO + type: object + TaskPriority: + enum: + - 1 + - 2 + - 3 + title: TaskPriority + type: integer + TaskStatus: + enum: + - TODO + - IN_PROGRESS + - DEFERRED + - BLOCKED + - DONE + title: TaskStatus + type: string + TeamDTO: + properties: + id: + title: Id + type: string + name: + title: Name + type: string + description: + anyOf: + - type: string + - type: 'null' + default: null + title: Description + poc_id: + anyOf: + - type: string + - type: 'null' + default: null + title: Poc Id + invite_code: + title: Invite Code + type: string + created_by: + title: Created By + type: string + updated_by: + title: Updated By + type: string + created_at: + format: date-time + title: Created At + type: string + updated_at: + format: date-time + title: Updated At + type: string + users: + anyOf: + - items: {} + type: array + - type: 'null' + default: null + title: Users + required: + - id + - name + - invite_code + - created_by + - updated_by + - created_at + - updated_at + title: TeamDTO + type: object + UserDTO: + properties: + id: + title: Id + type: string + name: + title: Name + type: string + addedOn: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Addedon + tasksAssignedCount: + anyOf: + - type: integer + - type: 'null' + default: null + title: Tasksassignedcount + required: + - id + - name + title: UserDTO + type: object + UserSearchDTO: + properties: + id: + title: Id + type: string + name: + title: Name + type: string + email_id: + title: Email Id + type: string + created_at: + format: date-time + title: Created At + type: string + updated_at: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Updated At + required: + - id + - name + - email_id + - created_at + title: UserSearchDTO + type: object + UserSearchResponseDTO: + properties: + users: + items: + $ref: '#/components/schemas/UserSearchDTO' + title: Users + type: array + total_count: + title: Total Count + type: integer + page: + title: Page + type: integer + limit: + title: Limit + type: integer + required: + - users + - total_count + - page + - limit + title: UserSearchResponseDTO + type: object + UserTypeEnum: + enum: + - user + - team + type: string + description: |- + * `user` - user + * `team` - team + WatchlistDTO: + properties: + taskId: + title: Taskid + type: string + displayId: + title: Displayid + type: string + title: + title: Title + type: string + description: + anyOf: + - type: string + - type: 'null' + default: null + title: Description + priority: + anyOf: + - type: integer + - type: 'null' + default: null + title: Priority + status: + anyOf: + - type: string + - type: 'null' + default: null + title: Status + isAcknowledged: + anyOf: + - type: boolean + - type: 'null' + default: null + title: Isacknowledged + isDeleted: + anyOf: + - type: boolean + - type: 'null' + default: null + title: Isdeleted + labels: + default: [] + items: {} + title: Labels + type: array + dueAt: + anyOf: + - format: date-time + type: string + - type: 'null' + default: null + title: Dueat + createdAt: + format: date-time + title: Createdat + type: string + createdBy: + title: Createdby + type: string + watchlistId: + title: Watchlistid + type: string + required: + - taskId + - displayId + - title + - createdAt + - createdBy + - watchlistId + title: WatchlistDTO + type: object + securitySchemes: + basicAuth: + type: http + scheme: basic + cookieAuth: + type: apiKey + in: cookie + name: sessionid +servers: +- url: http://localhost:8000 + description: Development server +tags: +- name: tasks + description: Task management operations +- name: auth + description: Authentication operations +- name: health + description: Health check endpoints +externalDocs: + description: Find more info here + url: https://github.com/your-repo/todo-backend diff --git a/todo/constants/messages.py b/todo/constants/messages.py index 8691e5a1..9f5ec965 100644 --- a/todo/constants/messages.py +++ b/todo/constants/messages.py @@ -1,11 +1,25 @@ # Application Messages class AppMessages: TASK_CREATED = "Task created successfully" - + TEAM_CREATED = "Team created successfully" + GOOGLE_LOGIN_SUCCESS = "Successfully logged in with Google" + GOOGLE_LOGOUT_SUCCESS = "Successfully logged out" + TOKEN_REFRESHED = "Access token refreshed successfully" + USERS_SEARCHED_SUCCESS = "Users searched successfully" + WATCHLIST_CREATED = "Task added to watchlist successfully" + + # Repository error messages class RepositoryErrors: TASK_CREATION_FAILED = "Failed to create task: {0}" + TEAM_CREATION_FAILED = "Failed to create team: {0}" DB_INIT_FAILED = "Failed to initialize database: {0}" + USER_NOT_FOUND = "User not found: {0}" + USER_OPERATION_FAILED = "User operation failed" + USER_CREATE_UPDATE_FAILED = "User create/update failed: {0}" + USER_SEARCH_FAILED = "User search failed: {0}" + WATCHLIST_CREATION_FAILED = "Failed to add task to watchlist: {0}" + # API error messages class ApiErrors: @@ -18,13 +32,69 @@ class ApiErrors: INVALID_LABEL_IDS = "Invalid Label IDs" PAGE_NOT_FOUND = "Requested page exceeds available results" UNEXPECTED_ERROR_OCCURRED = "An unexpected error occurred" + TASK_NOT_FOUND = "Task with ID {0} not found." + TASK_NOT_FOUND_GENERIC = "Task not found." + TASK_NOT_FOUND_TITLE = "Task Not Found" + INVALID_TASK_ID = "Invalid task ID format" + RESOURCE_NOT_FOUND_TITLE = "Resource Not Found" + GOOGLE_AUTH_FAILED = "Google authentication failed" + GOOGLE_API_ERROR = "Google API error" + INVALID_AUTH_CODE = "Invalid authorization code" + TOKEN_EXCHANGE_FAILED = "Failed to exchange authorization code" + MISSING_USER_INFO_FIELDS = "Missing user info fields: {0}" + USER_INFO_FETCH_FAILED = "Failed to get user info: {0}" + OAUTH_INITIALIZATION_FAILED = "OAuth initialization failed: {0}" + AUTHENTICATION_FAILED = "Authentication failed: {0}" + INVALID_STATE_PARAMETER = "Invalid state parameter" + TOKEN_REFRESH_FAILED = "Token refresh failed: {0}" + LOGOUT_FAILED = "Logout failed: {0}" + STATE_CONFLICT_TITLE = "State Conflict" + UNAUTHORIZED_TITLE = "You are not authorized to perform this action" + USER_NOT_FOUND = "User with ID {0} not found." + USER_NOT_FOUND_GENERIC = "User not found." + SEARCH_QUERY_EMPTY = "Search query cannot be empty" + TASK_ALREADY_IN_WATCHLIST = "Task is already in the watchlist" + # Validation error messages class ValidationErrors: BLANK_TITLE = "Title must not be blank." INVALID_OBJECT_ID = "{0} is not a valid ObjectId." PAST_DUE_DATE = "Due date must be in the future." + REQUIRED_TIMEZONE = "Timezone is required if dueAt is provided." + INVALID_TIMEZONE = "Invalid timezone." + PAST_DEFERRED_TILL_DATE = "deferredTill cannot be in the past." + CANNOT_DEFER_TOO_CLOSE_TO_DUE_DATE = "Cannot defer task too close to the due date." + CANNOT_DEFER_A_DONE_TASK = "Cannot defer a task that is already marked as done." PAGE_POSITIVE = "Page must be a positive integer" LIMIT_POSITIVE = "Limit must be a positive integer" MAX_LIMIT_EXCEEDED = "Maximum limit of {0} exceeded" - MISSING_LABEL_IDS = "The following label IDs do not exist: {0}" \ No newline at end of file + INVALID_SEARCH_QUERY_TYPE = "Search query must be a string." + MISSING_LABEL_IDS = "The following label ID(s) do not exist: {0}." + INVALID_TASK_ID_FORMAT = "Please enter a valid Task ID format." + UNSUPPORTED_ACTION = "Unsupported action '{0}'." + FUTURE_STARTED_AT = "The start date cannot be set in the future." + INVALID_LABELS_STRUCTURE = "Labels must be provided as a list or tuple of ObjectId strings." + MISSING_GOOGLE_ID = "Google ID is required" + MISSING_EMAIL = "Email is required" + MISSING_NAME = "Name is required" + MISSING_PICTURE = "Picture is required" + SEARCH_QUERY_EMPTY = "Search query cannot be empty" + TASK_ID_STRING_REQUIRED = "Task ID must be a string." + INVALID_IS_ACTIVE_VALUE = "Invalid value for is_active" + + +# Auth messages +class AuthErrorMessages: + TOKEN_MISSING = "Authentication token is required" + TOKEN_EXPIRED = "Authentication token has expired" + TOKEN_INVALID = "Invalid authentication token" + AUTHENTICATION_REQUIRED = "Authentication required" + TOKEN_EXPIRED_TITLE = "Token Expired" + INVALID_TOKEN_TITLE = "Invalid Token" + TOKEN_EXPIRED = "Access token has expired" + REFRESH_TOKEN_EXPIRED = "Refresh token has expired, please login again" + TOKEN_INVALID = "Invalid token" + MISSING_REQUIRED_PARAMETER = "Missing required parameter: {0}" + NO_ACCESS_TOKEN = "No access token" + NO_REFRESH_TOKEN = "No refresh token found" diff --git a/todo/constants/role.py b/todo/constants/role.py new file mode 100644 index 00000000..e265eeff --- /dev/null +++ b/todo/constants/role.py @@ -0,0 +1,29 @@ +from enum import Enum + + +class RoleScope(Enum): + GLOBAL = "GLOBAL" + TEAM = "TEAM" + + +class RoleName(Enum): + MODERATOR = "moderator" + OWNER = "owner" + ADMIN = "admin" + MEMBER = "member" + + +GLOBAL_ROLES = [RoleName.MODERATOR.value] +TEAM_ROLES = [RoleName.OWNER.value, RoleName.ADMIN.value, RoleName.MEMBER.value] + +DEFAULT_TEAM_ROLE = RoleName.MEMBER.value + +ROLE_SCOPE_CHOICES = [ + (RoleScope.GLOBAL.value, "Global"), + (RoleScope.TEAM.value, "Team"), +] + +VALID_ROLE_NAMES_BY_SCOPE = { + RoleScope.GLOBAL.value: GLOBAL_ROLES, + RoleScope.TEAM.value: TEAM_ROLES, +} diff --git a/todo/constants/task.py b/todo/constants/task.py index 0752fe20..cd17f7ee 100644 --- a/todo/constants/task.py +++ b/todo/constants/task.py @@ -13,3 +13,37 @@ class TaskPriority(Enum): HIGH = 1 MEDIUM = 2 LOW = 3 + + +SORT_FIELD_PRIORITY = "priority" +SORT_FIELD_DUE_AT = "dueAt" +SORT_FIELD_CREATED_AT = "createdAt" +SORT_FIELD_UPDATED_AT = "updatedAt" +SORT_FIELD_ASSIGNEE = "assignee" + +SORT_FIELDS = [ + SORT_FIELD_PRIORITY, + SORT_FIELD_DUE_AT, + SORT_FIELD_CREATED_AT, + SORT_FIELD_UPDATED_AT, + SORT_FIELD_ASSIGNEE, +] + +SORT_ORDER_ASC = "asc" +SORT_ORDER_DESC = "desc" + +SORT_ORDERS = [ + SORT_ORDER_ASC, + SORT_ORDER_DESC, +] + + +SORT_FIELD_DEFAULT_ORDERS = { + SORT_FIELD_CREATED_AT: SORT_ORDER_DESC, + SORT_FIELD_UPDATED_AT: SORT_ORDER_DESC, + SORT_FIELD_DUE_AT: SORT_ORDER_ASC, + SORT_FIELD_PRIORITY: SORT_ORDER_DESC, + SORT_FIELD_ASSIGNEE: SORT_ORDER_ASC, +} + +MINIMUM_DEFERRAL_NOTICE_DAYS = 20 diff --git a/todo/dto/add_team_member_dto.py b/todo/dto/add_team_member_dto.py new file mode 100644 index 00000000..114ae486 --- /dev/null +++ b/todo/dto/add_team_member_dto.py @@ -0,0 +1,6 @@ +from pydantic import BaseModel, Field +from typing import List + + +class AddTeamMemberDTO(BaseModel): + member_ids: List[str] = Field(..., description="List of user IDs to add to the team") diff --git a/todo/dto/deferred_details_dto.py b/todo/dto/deferred_details_dto.py new file mode 100644 index 00000000..4c8bbc02 --- /dev/null +++ b/todo/dto/deferred_details_dto.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel +from datetime import datetime +from todo.dto.user_dto import UserDTO + + +class DeferredDetailsDTO(BaseModel): + deferredAt: datetime + deferredTill: datetime + deferredBy: UserDTO diff --git a/todo/dto/label_dto.py b/todo/dto/label_dto.py index 0edf4840..b4887171 100644 --- a/todo/dto/label_dto.py +++ b/todo/dto/label_dto.py @@ -1,13 +1,7 @@ -from datetime import datetime from pydantic import BaseModel -from todo.dto.user_dto import UserDTO - class LabelDTO(BaseModel): + id: str name: str color: str - createdAt: datetime | None = None - updatedAt: datetime | None = None - createdBy: UserDTO | None = None - updatedBy: UserDTO | None = None diff --git a/todo/dto/responses/create_task_assignment_response.py b/todo/dto/responses/create_task_assignment_response.py new file mode 100644 index 00000000..b17c95c7 --- /dev/null +++ b/todo/dto/responses/create_task_assignment_response.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel +from todo.dto.task_assignment_dto import TaskAssignmentDTO + + +class CreateTaskAssignmentResponse(BaseModel): + data: TaskAssignmentDTO + message: str = "Task assignment created successfully" diff --git a/todo/dto/responses/create_task_response.py b/todo/dto/responses/create_task_response.py index b6e59e08..bef04be0 100644 --- a/todo/dto/responses/create_task_response.py +++ b/todo/dto/responses/create_task_response.py @@ -2,6 +2,7 @@ from todo.dto.task_dto import TaskDTO from todo.constants.messages import AppMessages + class CreateTaskResponse(BaseModel): statusCode: int = 201 successMessage: str = AppMessages.TASK_CREATED diff --git a/todo/dto/responses/create_team_response.py b/todo/dto/responses/create_team_response.py new file mode 100644 index 00000000..5cbaaa73 --- /dev/null +++ b/todo/dto/responses/create_team_response.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel +from todo.dto.team_dto import TeamDTO + + +class CreateTeamResponse(BaseModel): + """Response model for team creation endpoint. + + Attributes: + team: The newly created team details + message: Success or status message from the operation + """ + + team: TeamDTO + message: str diff --git a/todo/dto/responses/create_watchlist_response.py b/todo/dto/responses/create_watchlist_response.py new file mode 100644 index 00000000..e39b5afb --- /dev/null +++ b/todo/dto/responses/create_watchlist_response.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel +from todo.dto.watchlist_dto import CreateWatchlistDTO +from todo.constants.messages import AppMessages + + +class CreateWatchlistResponse(BaseModel): + statusCode: int = 201 + successMessage: str = AppMessages.WATCHLIST_CREATED + data: CreateWatchlistDTO diff --git a/todo/dto/responses/error_response.py b/todo/dto/responses/error_response.py index 4126b980..359aa13e 100644 --- a/todo/dto/responses/error_response.py +++ b/todo/dto/responses/error_response.py @@ -7,6 +7,7 @@ class ApiErrorSource(Enum): PARAMETER = "parameter" POINTER = "pointer" HEADER = "header" + PATH = "path" class ApiErrorDetail(BaseModel): @@ -19,3 +20,4 @@ class ApiErrorResponse(BaseModel): statusCode: int message: str errors: List[ApiErrorDetail] + authenticated: bool | None = None diff --git a/todo/dto/responses/generate_team_creation_invite_code_response.py b/todo/dto/responses/generate_team_creation_invite_code_response.py new file mode 100644 index 00000000..02b478ad --- /dev/null +++ b/todo/dto/responses/generate_team_creation_invite_code_response.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel, Field + + +class GenerateTeamCreationInviteCodeResponse(BaseModel): + """Response model for team creation invite code generation endpoint. + + Attributes: + code: The generated team creation invite code + description: Optional description for the code + message: Success or status message from the operation + """ + + code: str = Field(description="The generated team creation invite code") + description: str | None = Field(None, description="Optional description for the code") + message: str = Field(description="Success message confirming code generation") diff --git a/todo/dto/responses/get_labels_response.py b/todo/dto/responses/get_labels_response.py new file mode 100644 index 00000000..4a06dfe9 --- /dev/null +++ b/todo/dto/responses/get_labels_response.py @@ -0,0 +1,11 @@ +from typing import List + +from todo.dto.label_dto import LabelDTO +from todo.dto.responses.paginated_response import PaginatedResponse + + +class GetLabelsResponse(PaginatedResponse): + labels: List[LabelDTO] = [] + total: int = 0 + page: int = 1 + limit: int = 10 diff --git a/todo/dto/responses/get_task_by_id_response.py b/todo/dto/responses/get_task_by_id_response.py new file mode 100644 index 00000000..56873435 --- /dev/null +++ b/todo/dto/responses/get_task_by_id_response.py @@ -0,0 +1,6 @@ +from pydantic import BaseModel +from todo.dto.task_dto import TaskDTO + + +class GetTaskByIdResponse(BaseModel): + data: TaskDTO diff --git a/todo/dto/responses/get_team_creation_invite_codes_response.py b/todo/dto/responses/get_team_creation_invite_codes_response.py new file mode 100644 index 00000000..cadb6594 --- /dev/null +++ b/todo/dto/responses/get_team_creation_invite_codes_response.py @@ -0,0 +1,27 @@ +from pydantic import BaseModel, Field +from typing import List, Optional +from datetime import datetime + + +class TeamCreationInviteCodeListItemDTO(BaseModel): + """DTO for a single team creation invite code in the list.""" + + id: str = Field(description="Unique identifier for the team creation invite code") + code: str = Field(description="The actual invite code") + description: Optional[str] = Field(None, description="Optional description provided when generating the code") + created_by: dict = Field(description="User details of who created this code") + created_at: datetime = Field(description="Timestamp when the code was created") + used_at: Optional[datetime] = Field(None, description="Timestamp when the code was used (null if unused)") + used_by: Optional[dict] = Field(None, description="User details of who used this code (null if unused)") + is_used: bool = Field(description="Whether this code has been used for team creation") + + +class GetTeamCreationInviteCodesResponse(BaseModel): + """Response model for listing all team creation invite codes with pagination links.""" + + codes: List[TeamCreationInviteCodeListItemDTO] = Field( + description="List of team creation invite codes for current page" + ) + previous_url: Optional[str] = Field(None, description="URL for previous page (null if no previous page)") + next_url: Optional[str] = Field(None, description="URL for next page (null if no next page)") + message: str = Field(description="Success message") diff --git a/todo/dto/responses/get_user_teams_response.py b/todo/dto/responses/get_user_teams_response.py new file mode 100644 index 00000000..bc868fb7 --- /dev/null +++ b/todo/dto/responses/get_user_teams_response.py @@ -0,0 +1,8 @@ +from typing import List +from pydantic import BaseModel +from todo.dto.team_dto import TeamDTO + + +class GetUserTeamsResponse(BaseModel): + teams: List[TeamDTO] = [] + total: int = 0 diff --git a/todo/dto/responses/get_watchlist_task_response.py b/todo/dto/responses/get_watchlist_task_response.py new file mode 100644 index 00000000..ca25fab2 --- /dev/null +++ b/todo/dto/responses/get_watchlist_task_response.py @@ -0,0 +1,8 @@ +from typing import List + +from todo.dto.responses.paginated_response import PaginatedResponse +from todo.dto.watchlist_dto import WatchlistDTO + + +class GetWatchlistTasksResponse(PaginatedResponse): + tasks: List[WatchlistDTO] = [] diff --git a/todo/dto/role_dto.py b/todo/dto/role_dto.py new file mode 100644 index 00000000..46da1790 --- /dev/null +++ b/todo/dto/role_dto.py @@ -0,0 +1,55 @@ +from datetime import datetime +from typing import Optional +from pydantic import BaseModel + +from todo.models.role import RoleModel + + +class RoleDTO(BaseModel): + """ + Role Data Transfer Object + """ + + id: str + name: str + description: Optional[str] = None + scope: str + is_active: bool + created_by: str + created_at: datetime + updated_by: Optional[str] = None + updated_at: Optional[datetime] = None + + model_config = {"json_encoders": {datetime: lambda v: v.isoformat()}} + + @classmethod + def from_model(cls, role_model: RoleModel) -> "RoleDTO": + """ + Convert RoleModel to RoleDTO + + Args: + role_model: The RoleModel instance to convert + + Returns: + RoleDTO: The converted data transfer object + + Raises: + ValueError: If role_model is None or invalid + """ + required_attrs = ["id", "name", "scope", "is_active", "created_by", "created_at"] + if not all(hasattr(role_model, attr) for attr in required_attrs): + raise ValueError(f"role_model must have all required attributes: {', '.join(required_attrs)}") + + scope_value = role_model.scope.value if hasattr(role_model.scope, "value") else str(role_model.scope) + + return cls( + id=str(role_model.id), + name=role_model.name, + description=role_model.description, + scope=scope_value, + is_active=role_model.is_active, + created_by=role_model.created_by, + created_at=role_model.created_at, + updated_by=role_model.updated_by, + updated_at=role_model.updated_at, + ) diff --git a/todo/dto/task_assignment_dto.py b/todo/dto/task_assignment_dto.py new file mode 100644 index 00000000..63b00a5a --- /dev/null +++ b/todo/dto/task_assignment_dto.py @@ -0,0 +1,68 @@ +from pydantic import BaseModel, validator +from typing import Optional, Literal +from datetime import datetime +from bson import ObjectId + + +class CreateTaskAssignmentDTO(BaseModel): + task_id: str + assignee_id: str + user_type: Literal["user", "team"] + team_id: Optional[str] = None + + @validator("task_id") + def validate_task_id(cls, value): + """Validate that the task ID is a valid ObjectId.""" + if not ObjectId.is_valid(value): + raise ValueError(f"Invalid task ID: {value}") + return value + + @validator("assignee_id") + def validate_assignee_id(cls, value): + """Validate that the assignee ID is a valid ObjectId.""" + if not ObjectId.is_valid(value): + raise ValueError(f"Invalid assignee ID: {value}") + return value + + @validator("user_type") + def validate_user_type(cls, value): + """Validate that the user type is valid.""" + if value not in ["user", "team"]: + raise ValueError("user_type must be either 'user' or 'team'") + return value + + @validator("team_id") + def validate_team_id(cls, value): + """Validate that the original team ID is a valid ObjectId if provided.""" + if value is not None and not ObjectId.is_valid(value): + raise ValueError(f"Invalid original team ID: {value}") + return value + + +class TaskAssignmentDTO(BaseModel): + id: str + task_id: str + assignee_id: str + assignee_name: Optional[str] = None + user_type: Literal["user", "team"] + executor_id: Optional[str] = None # User ID executing the task (for team assignments) + team_id: Optional[str] = None + is_active: bool + created_by: str + updated_by: Optional[str] = None + created_at: datetime + updated_at: Optional[datetime] = None + + +class TaskAssignmentResponseDTO(BaseModel): + id: str + task_id: str + assignee_id: str + user_type: Literal["user", "team"] + assignee_name: Optional[str] = None + executor_id: Optional[str] = None # User ID executing the task (for team assignments) + is_active: bool + created_by: str + updated_by: Optional[str] = None + created_at: datetime + updated_at: Optional[datetime] = None diff --git a/todo/dto/task_dto.py b/todo/dto/task_dto.py index 2490241a..4d0b4000 100644 --- a/todo/dto/task_dto.py +++ b/todo/dto/task_dto.py @@ -1,10 +1,14 @@ from datetime import datetime -from typing import List +from typing import List, Optional +from bson import ObjectId from pydantic import BaseModel, field_validator +from todo.constants.messages import ValidationErrors from todo.constants.task import TaskPriority, TaskStatus +from todo.dto.deferred_details_dto import DeferredDetailsDTO from todo.dto.label_dto import LabelDTO from todo.dto.user_dto import UserDTO +from todo.dto.task_assignment_dto import TaskAssignmentDTO class TaskDTO(BaseModel): @@ -14,11 +18,13 @@ class TaskDTO(BaseModel): description: str | None = None priority: TaskPriority | None = None status: TaskStatus | None = None - assignee: UserDTO | None = None + assignee: TaskAssignmentDTO | None = None isAcknowledged: bool | None = None labels: List[LabelDTO] = [] startedAt: datetime | None = None dueAt: datetime | None = None + deferredDetails: DeferredDetailsDTO | None = None + in_watchlist: Optional[bool] = None createdAt: datetime updatedAt: datetime | None = None createdBy: UserDTO @@ -33,9 +39,10 @@ class CreateTaskDTO(BaseModel): description: str | None = None priority: TaskPriority = TaskPriority.LOW status: TaskStatus = TaskStatus.TODO - assignee: str | None = None + assignee: dict | None = None # {"assignee_id": str, "user_type": "team"|"user"} labels: List[str] = [] dueAt: datetime | None = None + createdBy: str @field_validator("priority", mode="before") def parse_priority(cls, value): @@ -48,3 +55,9 @@ def parse_status(cls, value): if isinstance(value, str): return TaskStatus[value] return value + + @field_validator("createdBy") + def validate_created_by(cls, value: str) -> str: + if not ObjectId.is_valid(value): + raise ValueError(ValidationErrors.INVALID_OBJECT_ID.format(value)) + return value diff --git a/todo/dto/team_creation_invite_code_dto.py b/todo/dto/team_creation_invite_code_dto.py new file mode 100644 index 00000000..eb58dcd6 --- /dev/null +++ b/todo/dto/team_creation_invite_code_dto.py @@ -0,0 +1,30 @@ +from pydantic import BaseModel, Field +from typing import Optional +from datetime import datetime + + +class GenerateTeamCreationInviteCodeDTO(BaseModel): + """DTO for generating team creation invite codes. + + Allows admins to create invite codes with an optional description for tracking purposes.""" + + description: Optional[str] = None + + +class VerifyTeamCreationInviteCodeDTO(BaseModel): + """DTO for verifying team creation invite codes.""" + + code: str + + +class TeamCreationInviteCodeDTO(BaseModel): + """DTO for team creation invite code data.""" + + id: str = Field(description="Unique identifier for the team invite code") + code: str = Field(description="The actual invite code") + description: Optional[str] = Field(None, description="Optional description provided when generating the code") + created_by: str = Field(description="User ID of the admin who generated this code") + created_at: datetime = Field(description="Timestamp when the code was created") + used_at: Optional[datetime] = Field(None, description="Timestamp when the code was used (null if unused)") + used_by: Optional[str] = Field(None, description="User ID who used this code (null if unused)") + is_used: bool = Field(description="Whether this code has been used for team creation") diff --git a/todo/dto/team_dto.py b/todo/dto/team_dto.py new file mode 100644 index 00000000..1d0c5683 --- /dev/null +++ b/todo/dto/team_dto.py @@ -0,0 +1,52 @@ +from pydantic import BaseModel, validator +from typing import List, Optional +from datetime import datetime +from todo.repositories.user_repository import UserRepository + + +class CreateTeamDTO(BaseModel): + name: str + description: Optional[str] = None + member_ids: Optional[List[str]] = None + poc_id: Optional[str] = None + team_invite_code: str + + @validator("member_ids") + def validate_member_ids(cls, value): + """Validate that all member IDs exist in the database.""" + if value is None: + return value + + invalid_ids = [] + for member_id in value: + user = UserRepository.get_by_id(member_id) + if not user: + invalid_ids.append(member_id) + + if invalid_ids: + raise ValueError(f"Invalid member IDs: {invalid_ids}") + return value + + @validator("poc_id") + def validate_poc_id(cls, value): + """Validate that the POC ID exists in the database.""" + if value is None: + return value + + user = UserRepository.get_by_id(value) + if not user: + raise ValueError(f"Invalid POC ID: {value}") + return value + + +class TeamDTO(BaseModel): + id: str + name: str + description: Optional[str] = None + poc_id: Optional[str] = None + invite_code: str + created_by: str + updated_by: str + created_at: datetime + updated_at: datetime + users: Optional[list] = None # list of dicts with addedOn when member=true diff --git a/todo/dto/update_team_dto.py b/todo/dto/update_team_dto.py new file mode 100644 index 00000000..b0395b2f --- /dev/null +++ b/todo/dto/update_team_dto.py @@ -0,0 +1,54 @@ +from pydantic import BaseModel, field_validator +from typing import Optional +from todo.repositories.user_repository import UserRepository + + +class UpdateTeamDTO(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + poc_id: Optional[str] = None + member_ids: Optional[list[str]] = None + + @field_validator("name") + @classmethod + def validate_name(cls, value): + """Validate that name is not empty if provided.""" + if value is not None and not value.strip(): + raise ValueError("Team name cannot be blank") + return value.strip() if value else None + + @field_validator("description") + @classmethod + def validate_description(cls, value): + """Validate that description is not empty if provided.""" + if value is not None: + return value.strip() + return value + + @field_validator("poc_id") + @classmethod + def validate_poc_id(cls, value): + """Validate that the POC ID exists in the database if provided.""" + if value is None: + return value + + user = UserRepository.get_by_id(value) + if not user: + raise ValueError(f"Invalid POC ID: {value}") + return value + + @field_validator("member_ids") + @classmethod + def validate_member_ids(cls, value): + """Validate that all member IDs exist in the database if provided.""" + if value is None: + return value + + # Batch validate all member IDs in a single database query + existing_users = UserRepository.get_by_ids(value) + existing_ids = {str(user.id) for user in existing_users} + invalid_ids = [member_id for member_id in value if member_id not in existing_ids] + + if invalid_ids: + raise ValueError(f"Invalid member IDs: {invalid_ids}") + return value diff --git a/todo/dto/user_dto.py b/todo/dto/user_dto.py index 7c298b13..337edc54 100644 --- a/todo/dto/user_dto.py +++ b/todo/dto/user_dto.py @@ -1,6 +1,30 @@ from pydantic import BaseModel +from datetime import datetime +from typing import List, Optional class UserDTO(BaseModel): id: str name: str + addedOn: Optional[datetime] = None + tasksAssignedCount: Optional[int] = None + + +class UserSearchDTO(BaseModel): + id: str + name: str + email_id: str + created_at: datetime + updated_at: datetime | None = None + + +class UsersDTO(BaseModel): + id: str + name: str + + +class UserSearchResponseDTO(BaseModel): + users: List[UsersDTO] + total_count: int + page: int + limit: int diff --git a/todo/dto/watchlist_dto.py b/todo/dto/watchlist_dto.py new file mode 100644 index 00000000..3cdd284e --- /dev/null +++ b/todo/dto/watchlist_dto.py @@ -0,0 +1,48 @@ +from datetime import datetime +from pydantic import BaseModel +from typing import Optional + +from todo.constants.task import TaskPriority, TaskStatus +from todo.models.task import DeferredDetailsModel +from todo.dto.user_dto import UserDTO + + +class AssigneeDTO(BaseModel): + assignee_id: str + assignee_name: str + user_type: str # "user" or "team" + + +class WatchlistDTO(BaseModel): + taskId: str + displayId: str + title: str + description: Optional[str] = None + priority: Optional[TaskPriority] = None + deferredDetails: Optional[DeferredDetailsModel] = None + status: Optional[TaskStatus] = None + isAcknowledged: Optional[bool] = None + isDeleted: Optional[bool] = None + labels: list = [] + dueAt: Optional[datetime] = None + createdAt: datetime + createdBy: UserDTO + watchlistId: str + assignee: Optional[AssigneeDTO] = None + + class Config: + json_encoders = {TaskPriority: lambda x: x.name} + + +class CreateWatchlistDTO(BaseModel): + taskId: str + userId: str + isActive: bool = True + createdAt: datetime | None = None + createdBy: str | None = None + updatedAt: datetime | None = None + updatedBy: str | None = None + + +class UpdateWatchlistDTO(BaseModel): + isActive: bool diff --git a/todo/exceptions/auth_exceptions.py b/todo/exceptions/auth_exceptions.py new file mode 100644 index 00000000..6d8309a2 --- /dev/null +++ b/todo/exceptions/auth_exceptions.py @@ -0,0 +1,42 @@ +from todo.constants.messages import AuthErrorMessages, ApiErrors, RepositoryErrors + + +class BaseAuthException(Exception): + def __init__(self, message: str): + self.message = message + super().__init__(self.message) + + +class AuthException(BaseAuthException): + def __init__(self, message: str = ApiErrors.GOOGLE_AUTH_FAILED): + super().__init__(message) + + +class TokenExpiredError(BaseAuthException): + def __init__(self, message: str = AuthErrorMessages.TOKEN_EXPIRED): + super().__init__(message) + + +class TokenMissingError(BaseAuthException): + def __init__(self, message: str = AuthErrorMessages.NO_ACCESS_TOKEN): + super().__init__(message) + + +class TokenInvalidError(BaseAuthException): + def __init__(self, message: str = AuthErrorMessages.TOKEN_INVALID): + super().__init__(message) + + +class RefreshTokenExpiredError(BaseAuthException): + def __init__(self, message: str = AuthErrorMessages.REFRESH_TOKEN_EXPIRED): + super().__init__(message) + + +class APIException(BaseAuthException): + def __init__(self, message: str = ApiErrors.GOOGLE_API_ERROR): + super().__init__(message) + + +class UserNotFoundException(BaseAuthException): + def __init__(self, message: str = RepositoryErrors.USER_NOT_FOUND): + super().__init__(message) diff --git a/todo/exceptions/exception_handler.py b/todo/exceptions/exception_handler.py index 397ef6c4..b52f06cb 100644 --- a/todo/exceptions/exception_handler.py +++ b/todo/exceptions/exception_handler.py @@ -1,36 +1,294 @@ from typing import List -from rest_framework.exceptions import ValidationError +from rest_framework.exceptions import ValidationError as DRFValidationError from rest_framework.response import Response from rest_framework import status -from rest_framework.views import exception_handler +from rest_framework.views import exception_handler as drf_exception_handler from rest_framework.utils.serializer_helpers import ReturnDict +from django.conf import settings +from bson.errors import InvalidId as BsonInvalidId from todo.dto.responses.error_response import ApiErrorDetail, ApiErrorResponse, ApiErrorSource - - -def handle_exception(exc, context): - if isinstance(exc, ValidationError): - return Response( - ApiErrorResponse( - statusCode=status.HTTP_400_BAD_REQUEST, - message="Invalid request", - errors=format_validation_errors(exc.detail), - ).model_dump(mode="json", exclude_none=True), - status=status.HTTP_400_BAD_REQUEST, - ) - return exception_handler(exc, context) +from todo.constants.messages import ApiErrors, ValidationErrors, AuthErrorMessages +from todo.exceptions.task_exceptions import ( + TaskNotFoundException, + UnprocessableEntityException, + TaskStateConflictException, +) +from .auth_exceptions import ( + AuthException, + TokenExpiredError, + TokenInvalidError, + RefreshTokenExpiredError, + APIException, + UserNotFoundException, + TokenMissingError, +) def format_validation_errors(errors) -> List[ApiErrorDetail]: formatted_errors = [] if isinstance(errors, ReturnDict | dict): for field, messages in errors.items(): - if isinstance(messages, list): - for message in messages: - formatted_errors.append(ApiErrorDetail(detail=message, source={ApiErrorSource.PARAMETER: field})) - elif isinstance(messages, dict): - nested_errors = format_validation_errors(messages) - formatted_errors.extend(nested_errors) - else: - formatted_errors.append(ApiErrorDetail(detail=messages, source={ApiErrorSource.PARAMETER: field})) + details = messages if isinstance(messages, list) else [messages] + for message_detail in details: + if isinstance(message_detail, dict): + nested_errors = format_validation_errors(message_detail) + formatted_errors.extend(nested_errors) + else: + formatted_errors.append( + ApiErrorDetail(detail=str(message_detail), source={ApiErrorSource.PARAMETER: field}) + ) + elif isinstance(errors, list): + for message_detail in errors: + formatted_errors.append(ApiErrorDetail(detail=str(message_detail))) return formatted_errors + + +def handle_exception(exc, context): + response = drf_exception_handler(exc, context) + task_id = context.get("kwargs", {}).get("task_id") + user_id = context.get("kwargs", {}).get("user_id") + + error_list = [] + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + + if isinstance(exc, TokenExpiredError): + status_code = status.HTTP_401_UNAUTHORIZED + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.TOKEN_EXPIRED_TITLE, + detail=str(exc), + ) + ) + elif isinstance(exc, TokenMissingError): + status_code = status.HTTP_401_UNAUTHORIZED + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.AUTHENTICATION_REQUIRED, + detail=str(exc), + ) + ) + final_response_data = ApiErrorResponse( + statusCode=status_code, + message=str(exc) if not error_list else error_list[0].detail, + errors=error_list, + authenticated=False, + ) + return Response(data=final_response_data.model_dump(mode="json", exclude_none=True), status=status_code) + elif isinstance(exc, TokenInvalidError): + status_code = status.HTTP_401_UNAUTHORIZED + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.INVALID_TOKEN_TITLE, + detail=str(exc), + ) + ) + final_response_data = ApiErrorResponse( + statusCode=status_code, + message=str(exc) if not error_list else error_list[0].detail, + errors=error_list, + authenticated=False, + ) + return Response(data=final_response_data.model_dump(mode="json", exclude_none=True), status=status_code) + + elif isinstance(exc, TokenMissingError): + status_code = status.HTTP_401_UNAUTHORIZED + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.AUTHENTICATION_REQUIRED, + detail=str(exc), + ) + ) + final_response_data = ApiErrorResponse( + statusCode=status_code, + message=str(exc) if not error_list else error_list[0].detail, + errors=error_list, + authenticated=False, + ) + return Response(data=final_response_data.model_dump(mode="json", exclude_none=True), status=status_code) + elif isinstance(exc, TokenExpiredError): + status_code = status.HTTP_401_UNAUTHORIZED + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.TOKEN_EXPIRED_TITLE, + detail=str(exc), + ) + ) + final_response_data = ApiErrorResponse( + statusCode=status_code, + message=str(exc) if not error_list else error_list[0].detail, + errors=error_list, + authenticated=False, + ) + return Response(data=final_response_data.model_dump(mode="json", exclude_none=True), status=status_code) + elif isinstance(exc, TokenInvalidError): + status_code = status.HTTP_401_UNAUTHORIZED + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.INVALID_TOKEN_TITLE, + detail=str(exc), + ) + ) + final_response_data = ApiErrorResponse( + statusCode=status_code, + message=str(exc) if not error_list else error_list[0].detail, + errors=error_list, + authenticated=False, + ) + return Response(data=final_response_data.model_dump(mode="json", exclude_none=True), status=status_code) + elif isinstance(exc, RefreshTokenExpiredError): + status_code = status.HTTP_403_FORBIDDEN + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.HEADER: "Authorization"}, + title=AuthErrorMessages.TOKEN_EXPIRED_TITLE, + detail=str(exc), + ) + ) + elif isinstance(exc, AuthException): + status_code = status.HTTP_400_BAD_REQUEST + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "google_auth"}, + title=ApiErrors.GOOGLE_AUTH_FAILED, + detail=str(exc), + ) + ) + elif isinstance(exc, APIException): + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "google_api"}, + title=ApiErrors.GOOGLE_API_ERROR, + detail=str(exc), + ) + ) + elif isinstance(exc, UserNotFoundException): + status_code = status.HTTP_404_NOT_FOUND + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "user_id"}, + title=ApiErrors.RESOURCE_NOT_FOUND_TITLE, + detail=str(exc), + ) + ) + elif isinstance(exc, TaskNotFoundException): + status_code = status.HTTP_404_NOT_FOUND + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PATH: "task_id"} if task_id else None, + title=ApiErrors.RESOURCE_NOT_FOUND_TITLE, + detail=str(exc), + ) + ) + + elif isinstance(exc, UserNotFoundException): + status_code = status.HTTP_404_NOT_FOUND + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PATH: "user_id"} if user_id else None, + title=ApiErrors.RESOURCE_NOT_FOUND_TITLE, + detail=str(exc), + ) + ) + + elif isinstance(exc, PermissionError): + status_code = status.HTTP_403_FORBIDDEN + error_list.append( + ApiErrorDetail( + title=ApiErrors.UNAUTHORIZED_TITLE if hasattr(ApiErrors, "UNAUTHORIZED_TITLE") else "Permission Denied", + detail=str(exc), + ) + ) + + elif isinstance(exc, UnprocessableEntityException): + status_code = status.HTTP_422_UNPROCESSABLE_ENTITY + determined_message = str(exc) + error_list.append( + ApiErrorDetail(source=exc.source, title=ApiErrors.VALIDATION_ERROR, detail=determined_message) + ) + elif isinstance(exc, TaskStateConflictException): + status_code = status.HTTP_409_CONFLICT + error_list.append( + ApiErrorDetail( + source={"path": "task_id"}, + title=ApiErrors.STATE_CONFLICT_TITLE, + detail=str(exc), + ) + ) + elif isinstance(exc, BsonInvalidId): + status_code = status.HTTP_400_BAD_REQUEST + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PATH: "task_id"} if task_id else None, + title=ApiErrors.VALIDATION_ERROR, + detail=ValidationErrors.INVALID_TASK_ID_FORMAT, + ) + ) + elif ( + isinstance(exc, ValueError) + and hasattr(exc, "args") + and exc.args + and (exc.args[0] == ValidationErrors.INVALID_TASK_ID_FORMAT or exc.args[0] == "Invalid ObjectId format") + ): + status_code = status.HTTP_400_BAD_REQUEST + error_list.append( + ApiErrorDetail( + source={ApiErrorSource.PATH: "task_id"} if task_id else None, + title=ApiErrors.VALIDATION_ERROR, + detail=ValidationErrors.INVALID_TASK_ID_FORMAT, + ) + ) + elif ( + isinstance(exc, ValueError) and hasattr(exc, "args") and exc.args and isinstance(exc.args[0], ApiErrorResponse) + ): + api_error_response = exc.args[0] + return Response( + data=api_error_response.model_dump(mode="json", exclude_none=True), status=api_error_response.statusCode + ) + elif isinstance(exc, DRFValidationError): + status_code = status.HTTP_400_BAD_REQUEST + error_list = format_validation_errors(exc.detail) + if not error_list and exc.detail: + error_list.append(ApiErrorDetail(detail=str(exc.detail), title=ApiErrors.VALIDATION_ERROR)) + + else: + if response is not None: + status_code = response.status_code + if isinstance(response.data, dict) and "detail" in response.data: + detail_str = str(response.data["detail"]) + error_list.append(ApiErrorDetail(detail=detail_str, title=detail_str)) + elif isinstance(response.data, list): + for item_error in response.data: + error_list.append(ApiErrorDetail(detail=str(item_error), title=str(exc))) + else: + error_list.append( + ApiErrorDetail( + detail=str(response.data) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + title=str(exc), + ) + ) + else: + error_list.append( + ApiErrorDetail(detail=str(exc) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, title=str(exc)) + ) + + if not error_list and not ( + isinstance(exc, ValueError) and hasattr(exc, "args") and exc.args and isinstance(exc.args[0], ApiErrorResponse) + ): + default_detail_str = str(exc) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR + + error_list.append(ApiErrorDetail(detail=default_detail_str, title=str(exc))) + + final_response_data = ApiErrorResponse( + statusCode=status_code, + message=str(exc) if not error_list else error_list[0].detail, + errors=error_list, + ) + return Response(data=final_response_data.model_dump(mode="json", exclude_none=True), status=status_code) diff --git a/todo/exceptions/global_exception_handler.py b/todo/exceptions/global_exception_handler.py new file mode 100644 index 00000000..ea0289f2 --- /dev/null +++ b/todo/exceptions/global_exception_handler.py @@ -0,0 +1,68 @@ +import logging +from typing import Dict, Any, Callable +from functools import wraps +from rest_framework import status +from rest_framework.response import Response +from django.conf import settings + +from todo.exceptions.role_exceptions import ( + RoleNotFoundException, + RoleOperationException, +) + +logger = logging.getLogger(__name__) + + +def handle_exceptions(func: Callable) -> Callable: + """ + Decorator for automatic exception handling in views. + """ + + @wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except RoleNotFoundException as e: + logger.error(f"RoleNotFoundException: {e}") + return Response({"error": str(e)}, status=status.HTTP_404_NOT_FOUND) + except RoleOperationException as e: + logger.error(f"RoleOperationException: {e}") + return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + except Exception as e: + logger.error(f"Unexpected error: {e}", exc_info=True) + error_message = str(e) if settings.DEBUG else "Internal server error" + return Response({"error": error_message}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + return wrapper + + +class GlobalExceptionHandler: + """ + Class-based exception handler for centralized exception management. + Similar to Spring's @ControllerAdvice pattern. + """ + + @staticmethod + def handle_role_not_found(exc: RoleNotFoundException) -> Dict[str, Any]: + """Handle RoleNotFoundException""" + logger.error(f"Role not found: {exc}") + return {"error": str(exc), "status_code": status.HTTP_404_NOT_FOUND} + + @staticmethod + def handle_role_operation_error(exc: RoleOperationException) -> Dict[str, Any]: + """Handle RoleOperationException""" + logger.error(f"Role operation failed: {exc}") + return {"error": str(exc), "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR} + + @staticmethod + def handle_validation_error(exc: Exception) -> Dict[str, Any]: + """Handle validation errors""" + logger.error(f"Validation error: {exc}") + return {"error": "Validation failed", "details": str(exc), "status_code": status.HTTP_400_BAD_REQUEST} + + @staticmethod + def handle_generic_error(exc: Exception) -> Dict[str, Any]: + """Handle generic exceptions""" + logger.error(f"Unexpected error: {exc}", exc_info=True) + error_message = str(exc) if settings.DEBUG else "Internal server error" + return {"error": error_message, "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR} diff --git a/todo/exceptions/role_exceptions.py b/todo/exceptions/role_exceptions.py new file mode 100644 index 00000000..3251866c --- /dev/null +++ b/todo/exceptions/role_exceptions.py @@ -0,0 +1,31 @@ +class RoleNotFoundException(Exception): + """Exception raised when a role is not found.""" + + def __init__(self, role_id: str | None = None, role_name: str | None = None): + if role_id: + message = f"Role with ID '{role_id}' not found" + elif role_name: + message = f"Role with name '{role_name}' not found" + else: + message = "Role not found" + + super().__init__(message) + self.role_id = role_id + self.role_name = role_name + + +class RoleOperationException(Exception): + """Exception raised when a role operation fails.""" + + def __init__(self, message: str, operation: str | None = None, role_id: str | None = None): + if operation and role_id: + full_message = f"Role operation '{operation}' failed for role ID '{role_id}': {message}" + elif operation: + full_message = f"Role operation '{operation}' failed: {message}" + else: + full_message = message + + super().__init__(full_message) + self.operation = operation + self.role_id = role_id + self.original_message = message diff --git a/todo/exceptions/task_exceptions.py b/todo/exceptions/task_exceptions.py new file mode 100644 index 00000000..334ff1f0 --- /dev/null +++ b/todo/exceptions/task_exceptions.py @@ -0,0 +1,23 @@ +from todo.constants.messages import ApiErrors + + +class TaskNotFoundException(Exception): + def __init__(self, task_id: str | None = None, message_template: str = ApiErrors.TASK_NOT_FOUND): + if task_id: + self.message = message_template.format(task_id) + else: + self.message = ApiErrors.TASK_NOT_FOUND_GENERIC + super().__init__(self.message) + + +class UnprocessableEntityException(Exception): + def __init__(self, message: str, source: dict | None = None): + self.message = message + self.source = source + super().__init__(self.message) + + +class TaskStateConflictException(Exception): + def __init__(self, message: str): + self.message = message + super().__init__(self.message) diff --git a/todo/exceptions/user_exceptions.py b/todo/exceptions/user_exceptions.py new file mode 100644 index 00000000..a2920753 --- /dev/null +++ b/todo/exceptions/user_exceptions.py @@ -0,0 +1,13 @@ +from todo.constants.messages import ApiErrors + + +class UserNotFoundException(Exception): + def __init__(self, user_id: str | None = None, message_template: str = ApiErrors.USER_NOT_FOUND): + if user_id: + try: + self.message = message_template.format(user_id) + except (KeyError, ValueError): + self.message = f"{message_template} (ID: {user_id})" + else: + self.message = ApiErrors.USER_NOT_FOUND_GENERIC + super().__init__(self.message) diff --git a/todo/management/__init__.py b/todo/management/__init__.py new file mode 100644 index 00000000..f7ec5626 --- /dev/null +++ b/todo/management/__init__.py @@ -0,0 +1 @@ +# Empty __init__.py file to make this directory a Python package diff --git a/todo/management/commands/__init__.py b/todo/management/commands/__init__.py new file mode 100644 index 00000000..f7ec5626 --- /dev/null +++ b/todo/management/commands/__init__.py @@ -0,0 +1 @@ +# Empty __init__.py file to make this directory a Python package diff --git a/todo/management/commands/migrate_add_creator_to_teams.py b/todo/management/commands/migrate_add_creator_to_teams.py new file mode 100644 index 00000000..b67d6d5b --- /dev/null +++ b/todo/management/commands/migrate_add_creator_to_teams.py @@ -0,0 +1,35 @@ +from django.core.management.base import BaseCommand +from todo.models.team import UserTeamDetailsModel +from todo.repositories.team_repository import TeamRepository, UserTeamDetailsRepository +from todo.models.common.pyobjectid import PyObjectId + + +class Command(BaseCommand): + help = "Backfill user_team_details so every team has its creator as an active member." + + def handle(self, *args, **options): + teams = TeamRepository.get_collection().find({"is_deleted": False}) + updated = 0 + for team in teams: + team_id = team["_id"] + creator_id = team["created_by"] + # Check if creator is already an active member + exists = UserTeamDetailsRepository.get_collection().find_one( + { + "team_id": team_id, + "user_id": creator_id, + "is_active": True, + } + ) + if not exists: + user_team = UserTeamDetailsModel( + user_id=PyObjectId(creator_id), + team_id=PyObjectId(team_id), + role_id="1", + is_active=True, + created_by=PyObjectId(creator_id), + updated_by=PyObjectId(creator_id), + ) + UserTeamDetailsRepository.create(user_team) + updated += 1 + self.stdout.write(self.style.SUCCESS(f"Added creator as member to {updated} teams.")) diff --git a/todo/management/commands/migrate_labels.py b/todo/management/commands/migrate_labels.py new file mode 100644 index 00000000..e4b1fece --- /dev/null +++ b/todo/management/commands/migrate_labels.py @@ -0,0 +1,16 @@ +from django.core.management.base import BaseCommand +from todo_project.db.migrations import run_all_migrations + + +class Command(BaseCommand): + help = "Run database migrations including fixed labels" + + def handle(self, *args, **options): + self.stdout.write(self.style.SUCCESS("Starting database migrations...")) + + success = run_all_migrations() + + if success: + self.stdout.write(self.style.SUCCESS("All database migrations completed successfully!")) + else: + self.stdout.write(self.style.ERROR("Some database migrations failed!")) diff --git a/todo/management/commands/migrate_roles.py b/todo/management/commands/migrate_roles.py new file mode 100644 index 00000000..ff8897c6 --- /dev/null +++ b/todo/management/commands/migrate_roles.py @@ -0,0 +1,16 @@ +from django.core.management.base import BaseCommand +from todo_project.db.migrations import run_all_migrations + + +class Command(BaseCommand): + help = "Run database migrations including predefined roles" + + def handle(self, *args, **options): + self.stdout.write("Starting database migrations...") + + success = run_all_migrations() + + if success: + self.stdout.write("All database migrations completed successfully!") + else: + self.stdout.write("Some database migrations failed!") diff --git a/todo/management/commands/runserver_debug.py b/todo/management/commands/runserver_debug.py new file mode 100644 index 00000000..e9daea49 --- /dev/null +++ b/todo/management/commands/runserver_debug.py @@ -0,0 +1,59 @@ +import debugpy +import socket +from django.core.management.commands.runserver import Command as RunServerCommand + + +class Command(RunServerCommand): + help = "Run the Django development server with debugpy for VS Code debugging" + + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument("--debug-port", type=int, default=5678, help="Port for the debug server (default: 5678)") + parser.add_argument( + "--wait-for-client", action="store_true", help="Wait for debugger client to attach before starting server" + ) + + def is_port_in_use(self, port): + """Check if a port is already in use""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + try: + s.bind(("0.0.0.0", port)) + return False + except OSError: + return True + + def handle(self, *args, **options): + debug_port = options.get("debug_port", 5678) + wait_for_client = options.get("wait_for_client", False) + + # Check if debugpy is already initialized or connected + if debugpy.is_client_connected(): + self.stdout.write(self.style.WARNING(f"Debugger already connected on port {debug_port}")) + else: + # Check if debug port is in use + if self.is_port_in_use(debug_port): + self.stdout.write(self.style.ERROR(f"Port {debug_port} is already in use. Debug server not started.")) + self.stdout.write(self.style.WARNING("Django server will start without debug capability.")) + else: + try: + # Only configure debugpy if not already configured + if not hasattr(debugpy, "_is_configured") or not debugpy._is_configured: + # Listen for debugger connections + debugpy.listen(("0.0.0.0", debug_port)) + self.stdout.write(self.style.SUCCESS(f"Debug server listening on port {debug_port}")) + + if wait_for_client: + self.stdout.write(self.style.WARNING("Waiting for debugger client to attach...")) + debugpy.wait_for_client() + self.stdout.write(self.style.SUCCESS("Debugger client attached!")) + else: + self.stdout.write(self.style.SUCCESS("Server starting - you can now attach the debugger")) + else: + self.stdout.write(self.style.WARNING("Debug server already configured")) + + except Exception as e: + self.stdout.write(self.style.ERROR(f"Failed to start debug server: {str(e)}")) + self.stdout.write(self.style.WARNING("Django server will start without debug capability.")) + + # Call the parent runserver command + super().handle(*args, **options) diff --git a/todo/management/commands/sync_postgres_tables.py b/todo/management/commands/sync_postgres_tables.py new file mode 100644 index 00000000..32f160b7 --- /dev/null +++ b/todo/management/commands/sync_postgres_tables.py @@ -0,0 +1,31 @@ +from django.core.management.base import BaseCommand +from todo.services.postgres_sync_service import PostgresSyncService + + +class Command(BaseCommand): + help = "Synchronize labels and roles PostgreSQL tables with MongoDB data" + + def add_arguments(self, parser): + parser.add_argument( + "--force", + action="store_true", + help="Force sync even if tables already have data", + ) + + def handle(self, *args, **options): + self.stdout.write(self.style.SUCCESS("Starting PostgreSQL table synchronization for labels and roles...")) + + try: + postgres_sync_service = PostgresSyncService() + + if options["force"]: + self.stdout.write("Force sync enabled - will sync all tables regardless of existing data") + + success = postgres_sync_service.sync_all_tables() + + if success: + self.stdout.write(self.style.SUCCESS("PostgreSQL table synchronization completed successfully!")) + else: + self.stdout.write(self.style.ERROR("Some PostgreSQL table synchronizations failed!")) + except Exception as e: + self.stdout.write(self.style.ERROR(f"PostgreSQL table synchronization failed: {str(e)}")) diff --git a/todo/middlewares/jwt_auth.py b/todo/middlewares/jwt_auth.py new file mode 100644 index 00000000..46a0f21c --- /dev/null +++ b/todo/middlewares/jwt_auth.py @@ -0,0 +1,169 @@ +from django.conf import settings +from rest_framework import status +from django.http import JsonResponse +from todo.utils.jwt_utils import ( + validate_access_token, + validate_refresh_token, + generate_access_token, +) +from todo.exceptions.auth_exceptions import ( + TokenExpiredError, + TokenInvalidError, + RefreshTokenExpiredError, + TokenMissingError, +) +from todo.constants.messages import AuthErrorMessages, ApiErrors +from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail +from todo.repositories.user_repository import UserRepository + + +class JWTAuthenticationMiddleware: + def __init__(self, get_response) -> None: + self.get_response = get_response + + def __call__(self, request): + path = request.path + + if self._is_public_path(path): + return self.get_response(request) + + try: + auth_success = self._try_authentication(request) + if auth_success: + response = self.get_response(request) + return self._process_response(request, response) + else: + error_response = ApiErrorResponse( + statusCode=status.HTTP_401_UNAUTHORIZED, + message=AuthErrorMessages.AUTHENTICATION_REQUIRED, + errors=[ + ApiErrorDetail( + title=ApiErrors.AUTHENTICATION_FAILED, + detail=AuthErrorMessages.AUTHENTICATION_REQUIRED, + ) + ], + ) + return JsonResponse( + data=error_response.model_dump(mode="json", exclude_none=True), + status=status.HTTP_401_UNAUTHORIZED, + ) + + except (TokenMissingError, TokenExpiredError, TokenInvalidError) as e: + return self._handle_auth_error(e) + except Exception: + error_response = ApiErrorResponse( + statusCode=status.HTTP_401_UNAUTHORIZED, + message=ApiErrors.AUTHENTICATION_FAILED, + errors=[ + ApiErrorDetail( + title=ApiErrors.AUTHENTICATION_FAILED, + detail=AuthErrorMessages.AUTHENTICATION_REQUIRED, + ) + ], + ) + return JsonResponse( + data=error_response.model_dump(mode="json", exclude_none=True), + status=status.HTTP_401_UNAUTHORIZED, + ) + + def _try_authentication(self, request) -> bool: + try: + access_token = request.COOKIES.get(settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")) + if access_token: + try: + payload = validate_access_token(access_token) + self._set_user_data(request, payload) + return True + except (TokenExpiredError, TokenInvalidError): + pass + + return self._try_refresh(request) + + except (TokenExpiredError, TokenInvalidError) as e: + raise e + except Exception: + return False + + def _try_refresh(self, request) -> bool: + """Try to refresh access token""" + try: + refresh_token = request.COOKIES.get(settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")) + if not refresh_token: + return False + payload = validate_refresh_token(refresh_token) + + user_data = { + "user_id": payload["user_id"], + } + + new_access_token = generate_access_token(user_data) + + self._set_user_data(request, payload) + + request._new_access_token = new_access_token + request._access_token_expires = settings.JWT_CONFIG["ACCESS_TOKEN_LIFETIME"] + + return True + + except (RefreshTokenExpiredError, TokenInvalidError): + return False + except Exception: + return False + + def _set_user_data(self, request, payload): + """Set user data on request with database verification""" + user_id = payload["user_id"] + user = UserRepository.get_by_id(user_id) + if not user: + raise TokenInvalidError(AuthErrorMessages.INVALID_TOKEN) + + request.user_id = user_id + request.user_email = user.email_id + + def _process_response(self, request, response): + """Process response and set new cookies if token was refreshed""" + if hasattr(request, "_new_access_token"): + config = self._get_cookie_config() + response.set_cookie( + settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"), + request._new_access_token, + max_age=request._access_token_expires, + **config, + ) + return response + + def _get_cookie_config(self): + """Get cookie configuration""" + return { + "path": "/", + "domain": settings.COOKIE_SETTINGS.get("COOKIE_DOMAIN"), + "secure": settings.COOKIE_SETTINGS.get("COOKIE_SECURE"), + "httponly": True, + "samesite": settings.COOKIE_SETTINGS.get("COOKIE_SAMESITE"), + } + + def _is_public_path(self, path: str) -> bool: + return any(path.startswith(public_path) for public_path in settings.PUBLIC_PATHS) + + def _handle_auth_error(self, exception): + error_response = ApiErrorResponse( + statusCode=status.HTTP_401_UNAUTHORIZED, + message=str(exception), + errors=[ApiErrorDetail(title=ApiErrors.AUTHENTICATION_FAILED, detail=str(exception))], + ) + return JsonResponse( + data=error_response.model_dump(mode="json", exclude_none=True), + status=status.HTTP_401_UNAUTHORIZED, + ) + + +def get_current_user_info(request) -> dict: + if not hasattr(request, "user_id"): + return None + + user_info = { + "user_id": request.user_id, + "email": request.user_email, + } + + return user_info diff --git a/todo/migrations/0001_initial_setup.py b/todo/migrations/0001_initial_setup.py new file mode 100644 index 00000000..19d10164 --- /dev/null +++ b/todo/migrations/0001_initial_setup.py @@ -0,0 +1,448 @@ +# Generated by Django 5.1.5 on 2025-08-23 18:54 + +import django.db.models.deletion +import django.utils.timezone +from django.db import migrations, models + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="PostgresAuditLog", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("task_id", models.CharField(blank=True, max_length=24, null=True)), + ("team_id", models.CharField(blank=True, max_length=24, null=True)), + ("previous_executor_id", models.CharField(blank=True, max_length=24, null=True)), + ("new_executor_id", models.CharField(blank=True, max_length=24, null=True)), + ("spoc_id", models.CharField(blank=True, max_length=24, null=True)), + ("action", models.CharField(max_length=100)), + ("timestamp", models.DateTimeField(default=django.utils.timezone.now)), + ("status_from", models.CharField(blank=True, max_length=20, null=True)), + ("status_to", models.CharField(blank=True, max_length=20, null=True)), + ("assignee_from", models.CharField(blank=True, max_length=24, null=True)), + ("assignee_to", models.CharField(blank=True, max_length=24, null=True)), + ("performed_by", models.CharField(blank=True, max_length=24, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_audit_logs", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_au_mongo_i_e01883_idx"), + models.Index(fields=["task_id"], name="postgres_au_task_id_76f799_idx"), + models.Index(fields=["team_id"], name="postgres_au_team_id_aaca90_idx"), + models.Index(fields=["action"], name="postgres_au_action_582248_idx"), + models.Index(fields=["performed_by"], name="postgres_au_perform_f08d1f_idx"), + models.Index(fields=["timestamp"], name="postgres_au_timesta_ee4eef_idx"), + models.Index(fields=["sync_status"], name="postgres_au_sync_st_b7b811_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresLabel", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("name", models.CharField(max_length=100, unique=True)), + ("color", models.CharField(default="#000000", max_length=7)), + ("description", models.TextField(blank=True, null=True)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(blank=True, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_labels", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_la_mongo_i_f36969_idx"), + models.Index(fields=["name"], name="postgres_la_name_25bdde_idx"), + models.Index(fields=["sync_status"], name="postgres_la_sync_st_f795eb_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresRole", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("name", models.CharField(max_length=100, unique=True)), + ("description", models.TextField(blank=True, null=True)), + ("permissions", models.JSONField(default=dict)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(blank=True, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_roles", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_ro_mongo_i_018753_idx"), + models.Index(fields=["name"], name="postgres_ro_name_ef794d_idx"), + models.Index(fields=["sync_status"], name="postgres_ro_sync_st_9386cc_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresTask", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("display_id", models.CharField(blank=True, max_length=100, null=True)), + ("title", models.CharField(max_length=500)), + ("description", models.TextField(blank=True, null=True)), + ("priority", models.IntegerField(default=3)), + ("status", models.CharField(default="TODO", max_length=20)), + ("is_acknowledged", models.BooleanField(default=False)), + ("is_deleted", models.BooleanField(default=False)), + ("started_at", models.DateTimeField(blank=True, null=True)), + ("due_at", models.DateTimeField(blank=True, null=True)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(blank=True, null=True)), + ("created_by", models.CharField(max_length=24)), + ("updated_by", models.CharField(blank=True, max_length=24, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_tasks", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_ta_mongo_i_4bcd8b_idx"), + models.Index(fields=["display_id"], name="postgres_ta_display_0f1eae_idx"), + models.Index(fields=["status"], name="postgres_ta_status_ae228e_idx"), + models.Index(fields=["priority"], name="postgres_ta_priorit_6ea8ac_idx"), + models.Index(fields=["created_by"], name="postgres_ta_created_a5359a_idx"), + models.Index(fields=["due_at"], name="postgres_ta_due_at_45ae89_idx"), + models.Index(fields=["sync_status"], name="postgres_ta_sync_st_e67786_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresDeferredDetails", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("deferred_at", models.DateTimeField(blank=True, null=True)), + ("deferred_till", models.DateTimeField(blank=True, null=True)), + ("deferred_by", models.CharField(blank=True, max_length=24, null=True)), + ( + "task", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="deferred_details", + to="todo.postgrestask", + ), + ), + ], + options={ + "db_table": "postgres_deferred_details", + }, + ), + migrations.CreateModel( + name="PostgresTaskAssignment", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("task_mongo_id", models.CharField(max_length=24)), + ("assignee_id", models.CharField(max_length=24)), + ( + "user_type", + models.CharField( + choices=[("user", "User"), ("team", "Team")], + max_length=10, + ), + ), + ("team_id", models.CharField(blank=True, max_length=24, null=True)), + ("is_active", models.BooleanField(default=True)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(blank=True, null=True)), + ("created_by", models.CharField(max_length=24)), + ("updated_by", models.CharField(blank=True, max_length=24, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_task_assignments", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_ta_mongo_i_326fa9_idx"), + models.Index(fields=["task_mongo_id"], name="postgres_ta_task_mo_95ca3b_idx"), + models.Index(fields=["assignee_id"], name="postgres_ta_assignee_95ca3b_idx"), + models.Index(fields=["user_type"], name="postgres_ta_user_typ_d13fa3_idx"), + models.Index(fields=["team_id"], name="postgres_ta_team_id_a0605f_idx"), + models.Index(fields=["is_active"], name="postgres_ta_is_acti_8b9698_idx"), + models.Index(fields=["sync_status"], name="postgres_ta_sync_st_385c3f_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresTeam", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("name", models.CharField(max_length=100)), + ("description", models.TextField(blank=True, null=True)), + ("invite_code", models.CharField(max_length=100, unique=True)), + ("poc_id", models.CharField(blank=True, max_length=24, null=True)), + ("created_by", models.CharField(max_length=24)), + ("updated_by", models.CharField(max_length=24)), + ("is_deleted", models.BooleanField(default=False)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(default=django.utils.timezone.now)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_teams", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_te_mongo_i_abc268_idx"), + models.Index(fields=["invite_code"], name="postgres_te_invite__980f9f_idx"), + models.Index(fields=["created_by"], name="postgres_te_created_8f28f6_idx"), + models.Index(fields=["sync_status"], name="postgres_te_sync_st_19c6d6_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresTeamCreationInviteCode", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("code", models.CharField(max_length=100, unique=True)), + ("description", models.TextField(blank=True, null=True)), + ("created_by", models.CharField(max_length=24)), + ("used_by", models.CharField(blank=True, max_length=24, null=True)), + ("is_used", models.BooleanField(default=False)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("used_at", models.DateTimeField(blank=True, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_team_creation_invite_codes", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_te_mongo_i_9b5218_idx"), + models.Index(fields=["code"], name="postgres_te_code_e912c2_idx"), + models.Index(fields=["created_by"], name="postgres_te_created_cc1648_idx"), + models.Index(fields=["is_used"], name="postgres_te_is_used_23eea1_idx"), + models.Index(fields=["sync_status"], name="postgres_te_sync_st_0225fb_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresUser", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("google_id", models.CharField(max_length=255, unique=True)), + ("email_id", models.EmailField(max_length=254, unique=True)), + ("name", models.CharField(max_length=255)), + ("picture", models.URLField(blank=True, max_length=500, null=True)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(blank=True, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_users", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_us_mongo_i_b7de3d_idx"), + models.Index(fields=["google_id"], name="postgres_us_google__842c47_idx"), + models.Index(fields=["email_id"], name="postgres_us_email_i_fde0e2_idx"), + models.Index(fields=["sync_status"], name="postgres_us_sync_st_4b81bc_idx"), + ], + }, + ), + migrations.CreateModel( + name="PostgresUserRole", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("user_id", models.CharField(max_length=24)), + ("role_name", models.CharField(max_length=50)), + ("scope", models.CharField(max_length=20)), + ("team_id", models.CharField(blank=True, max_length=24, null=True)), + ("is_active", models.BooleanField(default=True)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("created_by", models.CharField(default="system", max_length=24)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_user_roles", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_us_mongo_i_a0b3f8_idx"), + models.Index(fields=["user_id"], name="postgres_us_user_id_e6b62a_idx"), + models.Index(fields=["role_name"], name="postgres_us_role_na_7ec8fa_idx"), + models.Index(fields=["scope"], name="postgres_us_scope_f92854_idx"), + models.Index(fields=["team_id"], name="postgres_us_team_id_90ff18_idx"), + models.Index(fields=["is_active"], name="postgres_us_is_acti_558107_idx"), + models.Index(fields=["sync_status"], name="postgres_us_sync_st_58315c_idx"), + ], + "unique_together": {("user_id", "role_name", "scope", "team_id")}, + }, + ), + migrations.CreateModel( + name="PostgresUserTeamDetails", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("user_id", models.CharField(max_length=24)), + ("team_id", models.CharField(max_length=24)), + ("created_by", models.CharField(max_length=24)), + ("updated_by", models.CharField(max_length=24)), + ("is_active", models.BooleanField(default=True)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(default=django.utils.timezone.now)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_user_team_details", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_us_mongo_i_c533ba_idx"), + models.Index(fields=["user_id"], name="postgres_us_user_id_50613a_idx"), + models.Index(fields=["team_id"], name="postgres_us_team_id_468318_idx"), + models.Index(fields=["is_active"], name="postgres_us_is_acti_a58a6c_idx"), + models.Index(fields=["sync_status"], name="postgres_us_sync_st_bbef4a_idx"), + ], + "unique_together": {("user_id", "team_id")}, + }, + ), + migrations.CreateModel( + name="PostgresWatchlist", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("mongo_id", models.CharField(blank=True, max_length=24, null=True, unique=True)), + ("task_id", models.CharField(max_length=24)), + ("user_id", models.CharField(max_length=24)), + ("is_active", models.BooleanField(default=True)), + ("created_by", models.CharField(max_length=24)), + ("created_at", models.DateTimeField(default=django.utils.timezone.now)), + ("updated_by", models.CharField(blank=True, max_length=24, null=True)), + ("updated_at", models.DateTimeField(blank=True, null=True)), + ("last_sync_at", models.DateTimeField(auto_now=True)), + ( + "sync_status", + models.CharField( + choices=[("SYNCED", "Synced"), ("PENDING", "Pending"), ("FAILED", "Failed")], + default="SYNCED", + max_length=20, + ), + ), + ("sync_error", models.TextField(blank=True, null=True)), + ], + options={ + "db_table": "postgres_watchlist", + "indexes": [ + models.Index(fields=["mongo_id"], name="postgres_wa_mongo_i_5c0868_idx"), + models.Index(fields=["task_id"], name="postgres_wa_task_id_adb0e4_idx"), + models.Index(fields=["user_id"], name="postgres_wa_user_id_71c384_idx"), + models.Index(fields=["is_active"], name="postgres_wa_is_acti_ae4d9b_idx"), + models.Index(fields=["sync_status"], name="postgres_wa_sync_st_29bd9a_idx"), + models.Index(fields=["user_id", "task_id"], name="postgres_wa_user_id_c1421a_idx"), + ], + "unique_together": {("user_id", "task_id")}, + }, + ), + migrations.CreateModel( + name="PostgresTaskLabel", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("label_mongo_id", models.CharField(max_length=24)), + ( + "task", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="task_labels", to="todo.postgrestask" + ), + ), + ], + options={ + "db_table": "postgres_task_labels", + "indexes": [models.Index(fields=["label_mongo_id"], name="postgres_ta_label_m_8f146d_idx")], + "unique_together": {("task", "label_mongo_id")}, + }, + ), + ] diff --git a/todo/migrations/0002_rename_postgres_ta_assignee_95ca3b_idx_postgres_ta_assigne_f1c6e7_idx_and_more.py b/todo/migrations/0002_rename_postgres_ta_assignee_95ca3b_idx_postgres_ta_assigne_f1c6e7_idx_and_more.py new file mode 100644 index 00000000..d402a6e8 --- /dev/null +++ b/todo/migrations/0002_rename_postgres_ta_assignee_95ca3b_idx_postgres_ta_assigne_f1c6e7_idx_and_more.py @@ -0,0 +1,32 @@ +# Generated by Django 5.1.5 on 2025-08-28 21:22 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("todo", "0001_initial_setup"), + ] + + operations = [ + migrations.RenameIndex( + model_name="postgrestaskassignment", + new_name="postgres_ta_assigne_f1c6e7_idx", + old_name="postgres_ta_assignee_95ca3b_idx", + ), + migrations.RenameIndex( + model_name="postgrestaskassignment", + new_name="postgres_ta_user_ty_5664c0_idx", + old_name="postgres_ta_user_typ_d13fa3_idx", + ), + migrations.RenameIndex( + model_name="postgrestaskassignment", + new_name="postgres_ta_team_id_982105_idx", + old_name="postgres_ta_team_id_a0605f_idx", + ), + migrations.RenameIndex( + model_name="postgrestaskassignment", + new_name="postgres_ta_is_acti_8882a6_idx", + old_name="postgres_ta_is_acti_8b9698_idx", + ), + ] diff --git a/todo/migrations/__init__.py b/todo/migrations/__init__.py new file mode 100644 index 00000000..c4696eb1 --- /dev/null +++ b/todo/migrations/__init__.py @@ -0,0 +1 @@ +# This file makes the migrations directory a Python package diff --git a/todo/models/audit_log.py b/todo/models/audit_log.py new file mode 100644 index 00000000..aa17809c --- /dev/null +++ b/todo/models/audit_log.py @@ -0,0 +1,25 @@ +from datetime import datetime, timezone +from typing import ClassVar +from pydantic import Field +from todo.models.common.document import Document +from todo.models.common.pyobjectid import PyObjectId + + +class AuditLogModel(Document): + collection_name: ClassVar[str] = "audit_logs" + + task_id: PyObjectId | None = None + team_id: PyObjectId | None = None + previous_executor_id: PyObjectId | None = None + new_executor_id: PyObjectId | None = None + spoc_id: PyObjectId | None = None + action: str # e.g., "assigned_to_team", "unassigned_from_team", "status_changed", "reassign_executor", "team_created", "member_joined_team", "member_added_to_team", "member_removed_from_team", "team_updated" + timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + # For status changes + status_from: str | None = None + status_to: str | None = None + # For assignment changes + assignee_from: PyObjectId | None = None + assignee_to: PyObjectId | None = None + # For general user reference (who performed the action) + performed_by: PyObjectId | None = None diff --git a/todo/models/label.py b/todo/models/label.py index 625ed075..fd63093a 100644 --- a/todo/models/label.py +++ b/todo/models/label.py @@ -1,5 +1,6 @@ from datetime import datetime from typing import ClassVar + from todo.models.common.document import Document diff --git a/todo/models/postgres/__init__.py b/todo/models/postgres/__init__.py new file mode 100644 index 00000000..7f961f4d --- /dev/null +++ b/todo/models/postgres/__init__.py @@ -0,0 +1,28 @@ +# Postgres models package for dual-write system + +from .user import PostgresUser +from .task import PostgresTask, PostgresTaskLabel, PostgresDeferredDetails +from .team import PostgresTeam, PostgresUserTeamDetails +from .label import PostgresLabel +from .role import PostgresRole +from .task_assignment import PostgresTaskAssignment +from .watchlist import PostgresWatchlist +from .user_role import PostgresUserRole +from .audit_log import PostgresAuditLog +from .team_creation_invite_code import PostgresTeamCreationInviteCode + +__all__ = [ + "PostgresUser", + "PostgresTask", + "PostgresTaskLabel", + "PostgresDeferredDetails", + "PostgresTeam", + "PostgresUserTeamDetails", + "PostgresLabel", + "PostgresRole", + "PostgresTaskAssignment", + "PostgresWatchlist", + "PostgresUserRole", + "PostgresAuditLog", + "PostgresTeamCreationInviteCode", +] diff --git a/todo/models/postgres/audit_log.py b/todo/models/postgres/audit_log.py new file mode 100644 index 00000000..c57b281c --- /dev/null +++ b/todo/models/postgres/audit_log.py @@ -0,0 +1,46 @@ +from django.db import models +from django.utils import timezone + + +class PostgresAuditLog(models.Model): + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + task_id = models.CharField(max_length=24, null=True, blank=True) + team_id = models.CharField(max_length=24, null=True, blank=True) + previous_executor_id = models.CharField(max_length=24, null=True, blank=True) + new_executor_id = models.CharField(max_length=24, null=True, blank=True) + spoc_id = models.CharField(max_length=24, null=True, blank=True) + action = models.CharField(max_length=100) + timestamp = models.DateTimeField(default=timezone.now) + status_from = models.CharField(max_length=20, null=True, blank=True) + status_to = models.CharField(max_length=20, null=True, blank=True) + assignee_from = models.CharField(max_length=24, null=True, blank=True) + assignee_to = models.CharField(max_length=24, null=True, blank=True) + performed_by = models.CharField(max_length=24, null=True, blank=True) + + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_audit_logs" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["task_id"]), + models.Index(fields=["team_id"]), + models.Index(fields=["action"]), + models.Index(fields=["performed_by"]), + models.Index(fields=["timestamp"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"{self.action} on task {self.task_id}" diff --git a/todo/models/postgres/label.py b/todo/models/postgres/label.py new file mode 100644 index 00000000..128ad9ad --- /dev/null +++ b/todo/models/postgres/label.py @@ -0,0 +1,50 @@ +from django.db import models +from django.utils import timezone + + +class PostgresLabel(models.Model): + """ + Postgres model for labels. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Label fields + name = models.CharField(max_length=100, unique=True) + color = models.CharField(max_length=7, default="#000000") # Hex color code + description = models.TextField(null=True, blank=True) + + # Timestamps + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(null=True, blank=True) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_labels" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["name"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return self.name + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/postgres/role.py b/todo/models/postgres/role.py new file mode 100644 index 00000000..363de6bd --- /dev/null +++ b/todo/models/postgres/role.py @@ -0,0 +1,50 @@ +from django.db import models +from django.utils import timezone + + +class PostgresRole(models.Model): + """ + Postgres model for roles. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Role fields + name = models.CharField(max_length=100, unique=True) + description = models.TextField(null=True, blank=True) + permissions = models.JSONField(default=dict) # Store permissions as JSON + + # Timestamps + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(null=True, blank=True) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_roles" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["name"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return self.name + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/postgres/task.py b/todo/models/postgres/task.py new file mode 100644 index 00000000..8b833fa8 --- /dev/null +++ b/todo/models/postgres/task.py @@ -0,0 +1,99 @@ +from django.db import models +from django.utils import timezone + + +class PostgresTask(models.Model): + """ + Postgres model for tasks, mirroring MongoDB TaskModel structure. + This enables future migration from MongoDB to Postgres. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Task fields + display_id = models.CharField(max_length=100, null=True, blank=True) + title = models.CharField(max_length=500) + description = models.TextField(null=True, blank=True) + + # Store the same format as MongoDB (integer for priority, string for status) + priority = models.IntegerField(default=3) # 1=HIGH, 2=MEDIUM, 3=LOW + status = models.CharField(max_length=20, default="TODO") + + # Boolean fields + is_acknowledged = models.BooleanField(default=False) + is_deleted = models.BooleanField(default=False) + + # Timestamps + started_at = models.DateTimeField(null=True, blank=True) + due_at = models.DateTimeField(null=True, blank=True) + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(null=True, blank=True) + + # References (as strings for now, will be foreign keys in future) + created_by = models.CharField(max_length=24) # MongoDB ObjectId as string + updated_by = models.CharField(max_length=24, null=True, blank=True) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_tasks" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["display_id"]), + models.Index(fields=["status"]), + models.Index(fields=["priority"]), + models.Index(fields=["created_by"]), + models.Index(fields=["due_at"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"{self.title} ({self.display_id or 'N/A'})" + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) + + +class PostgresTaskLabel(models.Model): + """ + Junction table for task-label relationships. + """ + + task = models.ForeignKey(PostgresTask, on_delete=models.CASCADE, related_name="task_labels") + label_mongo_id = models.CharField(max_length=24) # MongoDB ObjectId as string + + class Meta: + db_table = "postgres_task_labels" + unique_together = ["task", "label_mongo_id"] + indexes = [ + models.Index(fields=["label_mongo_id"]), + ] + + +class PostgresDeferredDetails(models.Model): + """ + Model for deferred task details. + """ + + task = models.OneToOneField(PostgresTask, on_delete=models.CASCADE, related_name="deferred_details") + deferred_at = models.DateTimeField(null=True, blank=True) + deferred_till = models.DateTimeField(null=True, blank=True) + deferred_by = models.CharField(max_length=24, null=True, blank=True) # MongoDB ObjectId as string + + class Meta: + db_table = "postgres_deferred_details" diff --git a/todo/models/postgres/task_assignment.py b/todo/models/postgres/task_assignment.py new file mode 100644 index 00000000..e341ac8f --- /dev/null +++ b/todo/models/postgres/task_assignment.py @@ -0,0 +1,62 @@ +from django.db import models +from django.utils import timezone + + +class PostgresTaskAssignment(models.Model): + """ + Postgres model for task assignments. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Assignment fields + task_mongo_id = models.CharField(max_length=24) # MongoDB ObjectId as string + assignee_id = models.CharField(max_length=24) # MongoDB ObjectId as string (user or team ID) + user_type = models.CharField(max_length=10, choices=[("user", "User"), ("team", "Team")]) # user or team + team_id = models.CharField( + max_length=24, null=True, blank=True + ) # MongoDB ObjectId as string (only for team assignments) + is_active = models.BooleanField(default=True) # Match MongoDB approach + + # Timestamps + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(null=True, blank=True) + + # References + created_by = models.CharField(max_length=24) # MongoDB ObjectId as string + updated_by = models.CharField(max_length=24, null=True, blank=True) # MongoDB ObjectId as string + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_task_assignments" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["task_mongo_id"]), + models.Index(fields=["assignee_id"]), + models.Index(fields=["user_type"]), + models.Index(fields=["team_id"]), + models.Index(fields=["is_active"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"Task {self.task_mongo_id} assigned to {self.user_type} {self.assignee_id}" + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/postgres/team.py b/todo/models/postgres/team.py new file mode 100644 index 00000000..b71ec4f6 --- /dev/null +++ b/todo/models/postgres/team.py @@ -0,0 +1,115 @@ +from django.db import models +from django.utils import timezone + + +class PostgresTeam(models.Model): + """ + Postgres model for teams, mirroring MongoDB TeamModel structure. + This enables future migration from MongoDB to Postgres. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Team fields + name = models.CharField(max_length=100) + description = models.TextField(null=True, blank=True) + invite_code = models.CharField(max_length=100, unique=True) + + # References (as strings for now, will be foreign keys in future) + poc_id = models.CharField(max_length=24, null=True, blank=True) # MongoDB ObjectId as string + created_by = models.CharField(max_length=24) # MongoDB ObjectId as string + updated_by = models.CharField(max_length=24) # MongoDB ObjectId as string + + # Boolean fields + is_deleted = models.BooleanField(default=False) + + # Timestamps + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(default=timezone.now) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_teams" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["invite_code"]), + models.Index(fields=["created_by"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return self.name + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) + + +class PostgresUserTeamDetails(models.Model): + """ + Postgres model for user-team relationships, mirroring MongoDB UserTeamDetailsModel structure. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # References (as strings for now, will be foreign keys in future) + user_id = models.CharField(max_length=24) # MongoDB ObjectId as string + team_id = models.CharField(max_length=24) # MongoDB ObjectId as string + created_by = models.CharField(max_length=24) # MongoDB ObjectId as string + updated_by = models.CharField(max_length=24) # MongoDB ObjectId as string + + # Boolean fields + is_active = models.BooleanField(default=True) + + # Timestamps + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(default=timezone.now) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_user_team_details" + unique_together = ["user_id", "team_id"] + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["user_id"]), + models.Index(fields=["team_id"]), + models.Index(fields=["is_active"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"User {self.user_id} in Team {self.team_id}" + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/postgres/team_creation_invite_code.py b/todo/models/postgres/team_creation_invite_code.py new file mode 100644 index 00000000..827c4757 --- /dev/null +++ b/todo/models/postgres/team_creation_invite_code.py @@ -0,0 +1,56 @@ +from django.db import models +from django.utils import timezone + + +class PostgresTeamCreationInviteCode(models.Model): + """ + Postgres model for team creation invite codes, mirroring MongoDB TeamCreationInviteCodeModel structure. + This enables future migration from MongoDB to Postgres. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Invite code fields + code = models.CharField(max_length=100, unique=True) + description = models.TextField(null=True, blank=True) + + # User references + created_by = models.CharField(max_length=24) + used_by = models.CharField(max_length=24, null=True, blank=True) + + # Status and timestamps + is_used = models.BooleanField(default=False) + created_at = models.DateTimeField(default=timezone.now) + used_at = models.DateTimeField(null=True, blank=True) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_team_creation_invite_codes" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["code"]), + models.Index(fields=["created_by"]), + models.Index(fields=["is_used"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"Invite Code: {self.code} ({'Used' if self.is_used else 'Unused'})" + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/postgres/user.py b/todo/models/postgres/user.py new file mode 100644 index 00000000..4fff61b9 --- /dev/null +++ b/todo/models/postgres/user.py @@ -0,0 +1,53 @@ +from django.db import models +from django.utils import timezone + + +class PostgresUser(models.Model): + """ + Postgres model for users, mirroring MongoDB UserModel structure. + This enables future migration from MongoDB to Postgres. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # User fields + google_id = models.CharField(max_length=255, unique=True) + email_id = models.EmailField(unique=True) + name = models.CharField(max_length=255) + picture = models.URLField(max_length=500, null=True, blank=True) + + # Timestamps + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(null=True, blank=True) + + # Sync metadata + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_users" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["google_id"]), + models.Index(fields=["email_id"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"{self.name} ({self.email_id})" + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/postgres/user_role.py b/todo/models/postgres/user_role.py new file mode 100644 index 00000000..9358e1f8 --- /dev/null +++ b/todo/models/postgres/user_role.py @@ -0,0 +1,42 @@ +from django.db import models +from django.utils import timezone + + +class PostgresUserRole(models.Model): + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + user_id = models.CharField(max_length=24) + role_name = models.CharField(max_length=50) + scope = models.CharField(max_length=20) + team_id = models.CharField(max_length=24, null=True, blank=True) + is_active = models.BooleanField(default=True) + created_at = models.DateTimeField(default=timezone.now) + created_by = models.CharField(max_length=24, default="system") + + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_user_roles" + unique_together = ["user_id", "role_name", "scope", "team_id"] + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["user_id"]), + models.Index(fields=["role_name"]), + models.Index(fields=["scope"]), + models.Index(fields=["team_id"]), + models.Index(fields=["is_active"]), + models.Index(fields=["sync_status"]), + ] + + def __str__(self): + return f"User {self.user_id} has Role {self.role_name} ({self.scope})" diff --git a/todo/models/postgres/watchlist.py b/todo/models/postgres/watchlist.py new file mode 100644 index 00000000..777242de --- /dev/null +++ b/todo/models/postgres/watchlist.py @@ -0,0 +1,59 @@ +from django.db import models +from django.utils import timezone + + +class PostgresWatchlist(models.Model): + """ + Postgres model for watchlists that matches MongoDB schema. + This represents a user watching a specific task. + """ + + # MongoDB ObjectId as string for reference + mongo_id = models.CharField(max_length=24, unique=True, null=True, blank=True) + + # Core watchlist fields matching MongoDB schema + task_id = models.CharField(max_length=24) # MongoDB ObjectId as string + user_id = models.CharField(max_length=24) # MongoDB ObjectId as string + is_active = models.BooleanField(default=True) + + # Audit fields + created_by = models.CharField(max_length=24) # MongoDB ObjectId as string + created_at = models.DateTimeField(default=timezone.now) + updated_by = models.CharField(max_length=24, null=True, blank=True) # MongoDB ObjectId as string + updated_at = models.DateTimeField(null=True, blank=True) + + # Sync metadata for dual write system + last_sync_at = models.DateTimeField(auto_now=True) + sync_status = models.CharField( + max_length=20, + choices=[ + ("SYNCED", "Synced"), + ("PENDING", "Pending"), + ("FAILED", "Failed"), + ], + default="SYNCED", + ) + sync_error = models.TextField(null=True, blank=True) + + class Meta: + db_table = "postgres_watchlist" + indexes = [ + models.Index(fields=["mongo_id"]), + models.Index(fields=["task_id"]), + models.Index(fields=["user_id"]), + models.Index(fields=["is_active"]), + models.Index(fields=["sync_status"]), + # Composite index for efficient queries + models.Index(fields=["user_id", "task_id"]), + ] + # Ensure unique user-task combination + unique_together = ["user_id", "task_id"] + + def __str__(self): + return f"Watchlist: User {self.user_id} -> Task {self.task_id}" + + def save(self, *args, **kwargs): + if not self.pk: # New instance + self.created_at = timezone.now() + self.updated_at = timezone.now() + super().save(*args, **kwargs) diff --git a/todo/models/role.py b/todo/models/role.py new file mode 100644 index 00000000..fd851bb5 --- /dev/null +++ b/todo/models/role.py @@ -0,0 +1,23 @@ +from pydantic import Field, ConfigDict +from typing import ClassVar +from datetime import datetime + +from todo.constants.role import RoleScope, RoleName +from todo.models.common.document import Document +from todo.models.common.pyobjectid import PyObjectId + + +class RoleModel(Document): + collection_name: ClassVar[str] = "roles" + + id: PyObjectId | None = Field(None, alias="_id") + name: RoleName + description: str | None = None + scope: RoleScope = RoleScope.GLOBAL + is_active: bool = True + created_by: str + created_at: datetime + updated_by: str | None = None + updated_at: datetime | None = None + + model_config = ConfigDict(ser_enum="value", from_attributes=True, populate_by_name=True, use_enum_values=True) diff --git a/todo/models/task.py b/todo/models/task.py index b60e5ffc..be4e6f32 100644 --- a/todo/models/task.py +++ b/todo/models/task.py @@ -29,7 +29,6 @@ class TaskModel(Document): description: str | None = None priority: TaskPriority | None = TaskPriority.LOW status: TaskStatus | None = TaskStatus.TODO - assignee: str | None = None isAcknowledged: bool = False labels: List[PyObjectId] | None = [] isDeleted: bool = False diff --git a/todo/models/task_assignment.py b/todo/models/task_assignment.py new file mode 100644 index 00000000..8425df4b --- /dev/null +++ b/todo/models/task_assignment.py @@ -0,0 +1,41 @@ +from pydantic import Field, validator +from typing import ClassVar, Literal +from datetime import datetime, timezone +from bson import ObjectId + +from todo.models.common.document import Document +from todo.models.common.pyobjectid import PyObjectId + + +class TaskAssignmentModel(Document): + """ + Model for task assignments to users or teams. + """ + + collection_name: ClassVar[str] = "task_details" + + id: PyObjectId | None = Field(None, alias="_id") + task_id: PyObjectId + assignee_id: PyObjectId # Can be either team_id or user_id + user_type: Literal["user", "team"] # Changed from relation_type to user_type as requested + is_active: bool = True + created_by: PyObjectId + updated_by: PyObjectId | None = None + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + updated_at: datetime | None = None + executor_id: PyObjectId | None = None # User within the team who is executing the task + team_id: PyObjectId | None = None # Track the original team when reassigned from team to user + + @validator("task_id", "assignee_id", "created_by", "updated_by", "team_id") + def validate_object_ids(cls, v): + if v is None: + return v + if not ObjectId.is_valid(v): + raise ValueError(f"Invalid ObjectId: {v}") + return ObjectId(v) + + @validator("user_type") + def validate_user_type(cls, v): + if v not in ["user", "team"]: + raise ValueError("user_type must be either 'user' or 'team'") + return v diff --git a/todo/models/team.py b/todo/models/team.py new file mode 100644 index 00000000..a11a0828 --- /dev/null +++ b/todo/models/team.py @@ -0,0 +1,63 @@ +from bson import ObjectId +from pydantic import Field, validator +from typing import ClassVar +from datetime import datetime, timezone + +from todo.models.common.document import Document +from todo.models.common.pyobjectid import PyObjectId + + +class ObjectIdValidatorMixin: + @classmethod + def validate_object_id(cls, v): + if v is None: + raise ValueError("Object ID cannot be None") + if not PyObjectId.is_valid(v): + raise ValueError(f"Invalid Object ID format: {v}") + return v + + +class TeamModel(Document, ObjectIdValidatorMixin): + """ + Model for teams. + """ + + collection_name: ClassVar[str] = "teams" + + name: str = Field(..., min_length=1, max_length=100) + description: str | None = None + poc_id: PyObjectId | None = None + invite_code: str + created_by: PyObjectId + updated_by: PyObjectId + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + is_deleted: bool = False + + @validator("created_by", "updated_by", "poc_id") + def validate_object_id(cls, v): + if v is None: + return v + if not ObjectId.is_valid(v): + raise ValueError(f"Invalid ObjectId: {v}") + return ObjectId(v) + + +class UserTeamDetailsModel(Document, ObjectIdValidatorMixin): + """ + Model for user-team relationships. + """ + + collection_name: ClassVar[str] = "user_team_details" + + user_id: PyObjectId + team_id: PyObjectId + is_active: bool = True + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + created_by: PyObjectId + updated_by: PyObjectId + + @validator("user_id", "team_id", "created_by", "updated_by") + def validate_object_ids(cls, v): + return cls.validate_object_id(v) diff --git a/todo/models/team_creation_invite_code.py b/todo/models/team_creation_invite_code.py new file mode 100644 index 00000000..23c692e6 --- /dev/null +++ b/todo/models/team_creation_invite_code.py @@ -0,0 +1,31 @@ +from bson import ObjectId +from pydantic import Field, validator +from typing import ClassVar +from datetime import datetime, timezone + +from todo.models.common.document import Document +from todo.models.common.pyobjectid import PyObjectId + + +class TeamCreationInviteCodeModel(Document): + """ + Model for team creation invite codes. + """ + + collection_name: ClassVar[str] = "team_creation_invite_codes" + + code: str = Field(description="The actual invite code") + description: str | None = None + created_by: PyObjectId + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + used_at: datetime | None = None + used_by: PyObjectId | None = None + is_used: bool = False + + @validator("created_by", "used_by") + def validate_object_id(cls, v): + if v is None: + return v + if not ObjectId.is_valid(v): + raise ValueError(f"Invalid ObjectId: {v}") + return ObjectId(v) diff --git a/todo/models/user.py b/todo/models/user.py new file mode 100644 index 00000000..932cc072 --- /dev/null +++ b/todo/models/user.py @@ -0,0 +1,21 @@ +from pydantic import Field, EmailStr +from typing import ClassVar +from datetime import datetime, timezone + +from todo.models.common.document import Document + + +class UserModel(Document): + """ + Model for external users authenticated via Google OAuth. + Separate from internal RDS authenticated users. + """ + + collection_name: ClassVar[str] = "users" + + google_id: str + email_id: EmailStr + name: str + picture: str | None = None + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + updated_at: datetime | None = None diff --git a/todo/models/user_role.py b/todo/models/user_role.py new file mode 100644 index 00000000..d6792b6e --- /dev/null +++ b/todo/models/user_role.py @@ -0,0 +1,45 @@ +from pydantic import Field, validator, ConfigDict +from typing import ClassVar +from datetime import datetime, timezone + +from todo.models.common.document import Document +from todo.models.common.pyobjectid import PyObjectId +from todo.constants.role import RoleScope, RoleName, VALID_ROLE_NAMES_BY_SCOPE + + +class UserRoleModel(Document): + """User-role relationship model""" + + collection_name: ClassVar[str] = "user_roles" + + id: PyObjectId | None = Field(None, alias="_id") + user_id: str + role_name: RoleName + scope: RoleScope + team_id: str | None = None + is_active: bool = True + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + created_by: str = "system" + + model_config = ConfigDict(ser_enum="value", from_attributes=True, populate_by_name=True, use_enum_values=True) + + @validator("role_name") + def validate_role_name(cls, v, values): + """Validate role_name is valid for the given scope.""" + scope = values.get("scope") + if scope and scope.value in VALID_ROLE_NAMES_BY_SCOPE: + valid_roles = VALID_ROLE_NAMES_BY_SCOPE[scope.value] + role_value = v.value if hasattr(v, "value") else v + if role_value not in valid_roles: + raise ValueError(f"Invalid role '{role_value}' for scope '{scope.value}'. Valid roles: {valid_roles}") + return v + + @validator("team_id") + def validate_team_id(cls, v, values): + """Validate team_id requirements based on scope.""" + scope = values.get("scope") + if scope == RoleScope.TEAM and not v: + raise ValueError("team_id is required for TEAM scope roles") + if scope == RoleScope.GLOBAL and v: + raise ValueError("team_id should not be provided for GLOBAL scope roles") + return v diff --git a/todo/models/watchlist.py b/todo/models/watchlist.py new file mode 100644 index 00000000..76d88b23 --- /dev/null +++ b/todo/models/watchlist.py @@ -0,0 +1,16 @@ +from typing import ClassVar +from datetime import datetime + +from todo.models.common.document import Document + + +class WatchlistModel(Document): + collection_name: ClassVar[str] = "watchlist" + + taskId: str + userId: str + isActive: bool = True + createdAt: datetime + createdBy: str + updatedAt: datetime | None = None + updatedBy: str | None = None diff --git a/todo/repositories/abstract_repository.py b/todo/repositories/abstract_repository.py new file mode 100644 index 00000000..aae7a3a0 --- /dev/null +++ b/todo/repositories/abstract_repository.py @@ -0,0 +1,200 @@ +from abc import ABC, abstractmethod +from typing import Any, Dict, List, Optional, TypeVar, Generic +from pydantic import BaseModel + +T = TypeVar("T", bound=BaseModel) + + +class AbstractRepository(ABC, Generic[T]): + """ + Abstract repository interface that defines the contract for data access. + This enables seamless switching between MongoDB and Postgres in the future. + """ + + @abstractmethod + def create(self, data: Dict[str, Any]) -> T: + """Create a new document/record.""" + pass + + @abstractmethod + def get_by_id(self, id: str) -> Optional[T]: + """Get a document/record by ID.""" + pass + + @abstractmethod + def get_all(self, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100) -> List[T]: + """Get all documents/records with optional filtering and pagination.""" + pass + + @abstractmethod + def update(self, id: str, data: Dict[str, Any]) -> Optional[T]: + """Update a document/record by ID.""" + pass + + @abstractmethod + def delete(self, id: str) -> bool: + """Delete a document/record by ID.""" + pass + + @abstractmethod + def count(self, filters: Optional[Dict[str, Any]] = None) -> int: + """Count documents/records with optional filtering.""" + pass + + @abstractmethod + def exists(self, id: str) -> bool: + """Check if a document/record exists by ID.""" + pass + + +class AbstractUserRepository(AbstractRepository[T]): + """Abstract repository for user operations.""" + + @abstractmethod + def get_by_email(self, email: str) -> Optional[T]: + """Get user by email address.""" + pass + + @abstractmethod + def get_by_google_id(self, google_id: str) -> Optional[T]: + """Get user by Google ID.""" + pass + + +class AbstractTaskRepository(AbstractRepository[T]): + """Abstract repository for task operations.""" + + @abstractmethod + def get_by_user( + self, user_id: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[T]: + """Get tasks by user ID.""" + pass + + @abstractmethod + def get_by_team( + self, team_id: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[T]: + """Get tasks by team ID.""" + pass + + @abstractmethod + def get_by_status( + self, status: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[T]: + """Get tasks by status.""" + pass + + @abstractmethod + def get_by_priority( + self, priority: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[T]: + """Get tasks by priority.""" + pass + + +class AbstractTeamRepository(AbstractRepository[T]): + """Abstract repository for team operations.""" + + @abstractmethod + def get_by_invite_code(self, invite_code: str) -> Optional[T]: + """Get team by invite code.""" + pass + + @abstractmethod + def get_by_user(self, user_id: str) -> List[T]: + """Get teams by user ID.""" + pass + + +class AbstractLabelRepository(AbstractRepository[T]): + """Abstract repository for label operations.""" + + @abstractmethod + def get_by_name(self, name: str) -> Optional[T]: + """Get label by name.""" + pass + + +class AbstractRoleRepository(AbstractRepository[T]): + """Abstract repository for role operations.""" + + @abstractmethod + def get_by_name(self, name: str) -> Optional[T]: + """Get role by name.""" + pass + + +class AbstractTaskAssignmentRepository(AbstractRepository[T]): + """Abstract repository for task assignment operations.""" + + @abstractmethod + def get_by_task(self, task_id: str) -> List[T]: + """Get assignments by task ID.""" + pass + + @abstractmethod + def get_by_user(self, user_id: str) -> List[T]: + """Get assignments by user ID.""" + pass + + @abstractmethod + def get_by_team(self, team_id: str) -> List[T]: + """Get assignments by team ID.""" + pass + + +class AbstractWatchlistRepository(AbstractRepository[T]): + """Abstract repository for watchlist operations.""" + + @abstractmethod + def get_by_user(self, user_id: str) -> List[T]: + """Get watchlists by user ID.""" + pass + + +class AbstractUserRoleRepository(AbstractRepository[T]): + """Abstract repository for user role operations.""" + + @abstractmethod + def get_by_user(self, user_id: str) -> List[T]: + """Get user roles by user ID.""" + pass + + @abstractmethod + def get_by_team(self, team_id: str) -> List[T]: + """Get user roles by team ID.""" + pass + + +class AbstractUserTeamDetailsRepository(AbstractRepository[T]): + """Abstract repository for user team details operations.""" + + @abstractmethod + def get_by_user(self, user_id: str) -> List[T]: + """Get user team details by user ID.""" + pass + + @abstractmethod + def get_by_team(self, team_id: str) -> List[T]: + """Get user team details by team ID.""" + pass + + +class AbstractAuditLogRepository(AbstractRepository[T]): + """Abstract repository for audit log operations.""" + + @abstractmethod + def get_by_user(self, user_id: str, skip: int = 0, limit: int = 100) -> List[T]: + """Get audit logs by user ID.""" + pass + + @abstractmethod + def get_by_collection(self, collection_name: str, skip: int = 0, limit: int = 100) -> List[T]: + """Get audit logs by collection name.""" + pass + + @abstractmethod + def get_by_action(self, action: str, skip: int = 0, limit: int = 100) -> List[T]: + """Get audit logs by action.""" + pass diff --git a/todo/repositories/audit_log_repository.py b/todo/repositories/audit_log_repository.py new file mode 100644 index 00000000..8f94ed1b --- /dev/null +++ b/todo/repositories/audit_log_repository.py @@ -0,0 +1,50 @@ +from todo.models.audit_log import AuditLogModel +from todo.repositories.common.mongo_repository import MongoRepository +from datetime import datetime, timezone +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +class AuditLogRepository(MongoRepository): + collection_name = AuditLogModel.collection_name + + @classmethod + def create(cls, audit_log: AuditLogModel) -> AuditLogModel: + collection = cls.get_collection() + audit_log.timestamp = datetime.now(timezone.utc) + audit_log_dict = audit_log.model_dump(mode="json", by_alias=True, exclude_none=True) + insert_result = collection.insert_one(audit_log_dict) + audit_log.id = insert_result.inserted_id + + dual_write_service = EnhancedDualWriteService() + audit_log_data = { + "task_id": str(audit_log.task_id) if audit_log.task_id else None, + "team_id": str(audit_log.team_id) if audit_log.team_id else None, + "previous_executor_id": str(audit_log.previous_executor_id) if audit_log.previous_executor_id else None, + "new_executor_id": str(audit_log.new_executor_id) if audit_log.new_executor_id else None, + "spoc_id": str(audit_log.spoc_id) if audit_log.spoc_id else None, + "action": audit_log.action, + "timestamp": audit_log.timestamp, + "status_from": audit_log.status_from, + "status_to": audit_log.status_to, + "assignee_from": str(audit_log.assignee_from) if audit_log.assignee_from else None, + "assignee_to": str(audit_log.assignee_to) if audit_log.assignee_to else None, + "performed_by": str(audit_log.performed_by) if audit_log.performed_by else None, + } + + dual_write_success = dual_write_service.create_document( + collection_name="audit_logs", data=audit_log_data, mongo_id=str(audit_log.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync audit log {audit_log.id} to Postgres") + + return audit_log + + @classmethod + def get_by_team_id(cls, team_id: str) -> list[AuditLogModel]: + collection = cls.get_collection() + logs = collection.find({"team_id": team_id}).sort("timestamp", -1) + return [AuditLogModel(**log) for log in logs] diff --git a/todo/repositories/label_repository.py b/todo/repositories/label_repository.py index f65fe1a8..75f3af41 100644 --- a/todo/repositories/label_repository.py +++ b/todo/repositories/label_repository.py @@ -1,6 +1,7 @@ -from typing import List - +from typing import List, Tuple from bson import ObjectId +import re + from todo.models.label import LabelModel from todo.repositories.common.mongo_repository import MongoRepository @@ -15,3 +16,39 @@ def list_by_ids(cls, ids: List[ObjectId]) -> List[LabelModel]: labels_collection = cls.get_collection() labels_cursor = labels_collection.find({"_id": {"$in": ids}}) return [LabelModel(**label) for label in labels_cursor] + + @classmethod + def get_all(cls, page, limit, search) -> Tuple[int, List[LabelModel]]: + """ + Get paginated list of labels with optional search on name. + """ + labels_collection = cls.get_collection() + + query = {"isDeleted": {"$ne": True}} + + if search: + escaped_search = re.escape(search) + query["name"] = {"$regex": escaped_search, "$options": "i"} + + zero_indexed_page = page - 1 + skip = zero_indexed_page * limit + + pipeline = [ + {"$match": query}, + { + "$facet": { + "total": [{"$count": "count"}], + "data": [{"$sort": {"name": 1}}, {"$skip": skip}, {"$limit": limit}], + } + }, + ] + + aggregation_result = labels_collection.aggregate(pipeline) + result = next(aggregation_result, {"total": [], "data": []}) + + total_docs = result.get("total", []) + total_count = total_docs[0].get("count", 0) if total_docs else 0 + + labels = [LabelModel(**doc) for doc in result.get("data", [])] + + return total_count, labels diff --git a/todo/repositories/postgres_repository.py b/todo/repositories/postgres_repository.py new file mode 100644 index 00000000..09ea78c3 --- /dev/null +++ b/todo/repositories/postgres_repository.py @@ -0,0 +1,304 @@ +from typing import Any, Dict, List, Optional, Type +from django.db import models +from django.core.exceptions import ObjectDoesNotExist + +from todo.repositories.abstract_repository import AbstractRepository +from todo.models.postgres import ( + PostgresUser, + PostgresTask, + PostgresTeam, + PostgresUserTeamDetails, + PostgresLabel, + PostgresRole, + PostgresTaskAssignment, + PostgresWatchlist, + PostgresUserRole, + PostgresAuditLog, +) + + +class BasePostgresRepository(AbstractRepository): + """ + Base Postgres repository implementation. + Provides common CRUD operations for Postgres models. + """ + + def __init__(self, model_class: Type[models.Model]): + self.model_class = model_class + + def create(self, data: Dict[str, Any]) -> Any: + """Create a new record in Postgres.""" + try: + instance = self.model_class.objects.create(**data) + return instance + except Exception as e: + raise Exception(f"Failed to create record: {str(e)}") + + def get_by_id(self, id: str) -> Optional[Any]: + """Get a record by ID (using mongo_id field).""" + try: + return self.model_class.objects.get(mongo_id=id) + except ObjectDoesNotExist: + return None + + def get_all(self, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100) -> List[Any]: + """Get all records with optional filtering and pagination.""" + queryset = self.model_class.objects.all() + + if filters: + queryset = self._apply_filters(queryset, filters) + + return list(queryset[skip : skip + limit]) + + def update(self, id: str, data: Dict[str, Any]) -> Optional[Any]: + """Update a record by ID.""" + try: + instance = self.model_class.objects.get(mongo_id=id) + for field, value in data.items(): + if hasattr(instance, field): + setattr(instance, field, value) + instance.save() + return instance + except ObjectDoesNotExist: + return None + + def delete(self, id: str) -> bool: + """Delete a record by ID.""" + try: + instance = self.model_class.objects.get(mongo_id=id) + instance.delete() + return True + except ObjectDoesNotExist: + return False + + def count(self, filters: Optional[Dict[str, Any]] = None) -> int: + """Count records with optional filtering.""" + queryset = self.model_class.objects.all() + + if filters: + queryset = self._apply_filters(queryset, filters) + + return queryset.count() + + def exists(self, id: str) -> bool: + """Check if a record exists by ID.""" + return self.model_class.objects.filter(mongo_id=id).exists() + + def _apply_filters(self, queryset, filters: Dict[str, Any]): + """Apply filters to a queryset.""" + for field, value in filters.items(): + if hasattr(self.model_class, field): + if isinstance(value, dict): + # Handle complex filters like {'gte': value, 'lte': value} + for operator, operator_value in value.items(): + if operator == "gte": + queryset = queryset.filter(**{f"{field}__gte": operator_value}) + elif operator == "lte": + queryset = queryset.filter(**{f"{field}__lte": operator_value}) + elif operator == "contains": + queryset = queryset.filter(**{f"{field}__icontains": operator_value}) + elif operator == "in": + queryset = queryset.filter(**{f"{field}__in": operator_value}) + else: + queryset = queryset.filter(**{field: value}) + + return queryset + + +class PostgresUserRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for user operations.""" + + def __init__(self): + super().__init__(PostgresUser) + + def get_by_email(self, email: str) -> Optional[PostgresUser]: + """Get user by email address.""" + try: + return PostgresUser.objects.get(email_id=email) + except ObjectDoesNotExist: + return None + + def get_by_google_id(self, google_id: str) -> Optional[PostgresUser]: + """Get user by Google ID.""" + try: + return PostgresUser.objects.get(google_id=google_id) + except ObjectDoesNotExist: + return None + + +class PostgresTaskRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for task operations.""" + + def __init__(self): + super().__init__(PostgresTask) + + def get_by_user( + self, user_id: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[PostgresTask]: + """Get tasks by user ID.""" + queryset = PostgresTask.objects.filter(created_by=user_id) + + if filters: + queryset = self._apply_filters(queryset, filters) + + return list(queryset[skip : skip + limit]) + + def get_by_team( + self, team_id: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[PostgresTask]: + """Get tasks by team ID.""" + # This would need to be implemented based on your team-task relationship + # For now, returning empty list + return [] + + def get_by_status( + self, status: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[PostgresTask]: + """Get tasks by status.""" + queryset = PostgresTask.objects.filter(status=status) + + if filters: + queryset = self._apply_filters(queryset, filters) + + return list(queryset[skip : skip + limit]) + + def get_by_priority( + self, priority: str, filters: Optional[Dict[str, Any]] = None, skip: int = 0, limit: int = 100 + ) -> List[PostgresTask]: + """Get tasks by priority.""" + queryset = PostgresTask.objects.filter(priority=priority) + + if filters: + queryset = self._apply_filters(queryset, filters) + + return list(queryset[skip : skip + limit]) + + +class PostgresTeamRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for team operations.""" + + def __init__(self): + super().__init__(PostgresTeam) + + def get_by_invite_code(self, invite_code: str) -> Optional[PostgresTeam]: + """Get team by invite code.""" + try: + return PostgresTeam.objects.get(invite_code=invite_code) + except ObjectDoesNotExist: + return None + + def get_by_user(self, user_id: str) -> List[PostgresTeam]: + """Get teams by user ID.""" + # Get teams where user is a member + user_teams = PostgresUserTeamDetails.objects.filter(user_id=user_id, is_active=True).values_list( + "team_id", flat=True + ) + + return list(PostgresTeam.objects.filter(mongo_id__in=user_teams)) + + +class PostgresLabelRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for label operations.""" + + def __init__(self): + super().__init__(PostgresLabel) + + def get_by_name(self, name: str) -> Optional[PostgresLabel]: + """Get label by name.""" + try: + return PostgresLabel.objects.get(name=name) + except ObjectDoesNotExist: + return None + + +class PostgresRoleRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for role operations.""" + + def __init__(self): + super().__init__(PostgresRole) + + def get_by_name(self, name: str) -> Optional[PostgresRole]: + """Get role by name.""" + try: + return PostgresRole.objects.get(name=name) + except ObjectDoesNotExist: + return None + + +class PostgresTaskAssignmentRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for task assignment operations.""" + + def __init__(self): + super().__init__(PostgresTaskAssignment) + + def get_by_task(self, task_id: str) -> List[PostgresTaskAssignment]: + """Get assignments by task ID.""" + return list(PostgresTaskAssignment.objects.filter(task_mongo_id=task_id)) + + def get_by_user(self, user_id: str) -> List[PostgresTaskAssignment]: + """Get assignments by user ID.""" + return list(PostgresTaskAssignment.objects.filter(user_mongo_id=user_id)) + + def get_by_team(self, team_id: str) -> List[PostgresTaskAssignment]: + """Get assignments by team ID.""" + return list(PostgresTaskAssignment.objects.filter(team_mongo_id=team_id)) + + +class PostgresWatchlistRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for watchlist operations.""" + + def __init__(self): + super().__init__(PostgresWatchlist) + + def get_by_user(self, user_id: str) -> List[PostgresWatchlist]: + """Get watchlists by user ID.""" + return list(PostgresWatchlist.objects.filter(user_mongo_id=user_id)) + + +class PostgresUserRoleRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for user role operations.""" + + def __init__(self): + super().__init__(PostgresUserRole) + + def get_by_user(self, user_id: str) -> List[PostgresUserRole]: + """Get user roles by user ID.""" + return list(PostgresUserRole.objects.filter(user_mongo_id=user_id)) + + def get_by_team(self, team_id: str) -> List[PostgresUserRole]: + """Get user roles by team ID.""" + return list(PostgresUserRole.objects.filter(team_mongo_id=team_id)) + + +class PostgresUserTeamDetailsRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for user team details operations.""" + + def __init__(self): + super().__init__(PostgresUserTeamDetails) + + def get_by_user(self, user_id: str) -> List[PostgresUserTeamDetails]: + """Get user team details by user ID.""" + return list(PostgresUserTeamDetails.objects.filter(user_id=user_id)) + + def get_by_team(self, team_id: str) -> List[PostgresUserTeamDetails]: + """Get user team details by team ID.""" + return list(PostgresUserTeamDetails.objects.filter(team_id=team_id)) + + +class PostgresAuditLogRepository(BasePostgresRepository, AbstractRepository): + """Postgres repository for audit log operations.""" + + def __init__(self): + super().__init__(PostgresAuditLog) + + def get_by_user(self, user_id: str, skip: int = 0, limit: int = 100) -> List[PostgresAuditLog]: + """Get audit logs by user ID.""" + return list(PostgresAuditLog.objects.filter(user_mongo_id=user_id)[skip : skip + limit]) + + def get_by_collection(self, collection_name: str, skip: int = 0, limit: int = 100) -> List[PostgresAuditLog]: + """Get audit logs by collection name.""" + return list(PostgresAuditLog.objects.filter(collection_name=collection_name)[skip : skip + limit]) + + def get_by_action(self, action: str, skip: int = 0, limit: int = 100) -> List[PostgresAuditLog]: + """Get audit logs by action.""" + return list(PostgresAuditLog.objects.filter(action=action)[skip : skip + limit]) diff --git a/todo/repositories/role_repository.py b/todo/repositories/role_repository.py new file mode 100644 index 00000000..c01cfc6a --- /dev/null +++ b/todo/repositories/role_repository.py @@ -0,0 +1,71 @@ +from typing import List, Dict, Any, Optional +from bson import ObjectId +import logging + +from todo.models.role import RoleModel +from todo.repositories.common.mongo_repository import MongoRepository +from todo.constants.role import RoleScope + +logger = logging.getLogger(__name__) + + +class RoleRepository(MongoRepository): + collection_name = RoleModel.collection_name + + @classmethod + def list_all(cls, filters: Optional[Dict[str, Any]] = None) -> List[RoleModel]: + roles_collection = cls.get_collection() + + query = {} + if filters: + if "is_active" in filters: + query["is_active"] = filters["is_active"] + if "name" in filters: + query["name"] = filters["name"] + if "scope" in filters: + query["scope"] = filters["scope"] + + roles_cursor = roles_collection.find(query) + roles = [] + + for role_doc in roles_cursor: + try: + role_model = cls._document_to_model(role_doc) + roles.append(role_model) + except Exception as e: + logger.error(f"Error converting role document to model: {e}") + logger.error(f"Document: {role_doc}") + continue + + return roles + + @classmethod + def _document_to_model(cls, role_doc: dict) -> RoleModel: + if "scope" in role_doc and isinstance(role_doc["scope"], str): + role_doc["scope"] = RoleScope(role_doc["scope"]) + + return RoleModel(**role_doc) + + @classmethod + def get_by_id(cls, role_id: str) -> Optional[RoleModel]: + roles_collection = cls.get_collection() + role_data = roles_collection.find_one({"_id": ObjectId(role_id)}) + if role_data: + return cls._document_to_model(role_data) + return None + + @classmethod + def get_by_name(cls, name: str) -> Optional[RoleModel]: + roles_collection = cls.get_collection() + role_data = roles_collection.find_one({"name": name}) + if role_data: + return cls._document_to_model(role_data) + return None + + @classmethod + def get_by_name_and_scope(cls, name: str, scope: str) -> Optional[RoleModel]: + roles_collection = cls.get_collection() + role_data = roles_collection.find_one({"name": name, "scope": scope}) + if role_data: + return cls._document_to_model(role_data) + return None diff --git a/todo/repositories/task_assignment_repository.py b/todo/repositories/task_assignment_repository.py new file mode 100644 index 00000000..773d9328 --- /dev/null +++ b/todo/repositories/task_assignment_repository.py @@ -0,0 +1,357 @@ +from datetime import datetime, timezone +from typing import Optional, List +from bson import ObjectId + +from todo.exceptions.task_exceptions import TaskNotFoundException +from todo.models.task_assignment import TaskAssignmentModel +from todo.repositories.common.mongo_repository import MongoRepository +from todo.models.common.pyobjectid import PyObjectId +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +class TaskAssignmentRepository(MongoRepository): + collection_name = TaskAssignmentModel.collection_name + + @classmethod + def create(cls, task_assignment: TaskAssignmentModel) -> TaskAssignmentModel: + collection = cls.get_collection() + task_assignment.created_at = datetime.now(timezone.utc) + task_assignment.updated_at = None + + task_assignment_dict = task_assignment.model_dump(mode="json", by_alias=True, exclude_none=True) + insert_result = collection.insert_one(task_assignment_dict) + task_assignment.id = insert_result.inserted_id + + dual_write_service = EnhancedDualWriteService() + task_assignment_data = { + "task_mongo_id": str(task_assignment.task_id), + "assignee_id": str(task_assignment.assignee_id), + "user_type": task_assignment.user_type, + "team_id": str(task_assignment.team_id) if task_assignment.team_id else None, + "is_active": task_assignment.is_active, + "created_at": task_assignment.created_at, + "updated_at": task_assignment.updated_at, + "created_by": str(task_assignment.created_by), + "updated_by": str(task_assignment.updated_by) if task_assignment.updated_by else None, + } + + dual_write_success = dual_write_service.create_document( + collection_name="task_assignments", data=task_assignment_data, mongo_id=str(task_assignment.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task assignment {task_assignment.id} to Postgres") + + return task_assignment + + @classmethod + def get_by_task_id(cls, task_id: str) -> Optional[TaskAssignmentModel]: + """ + Get the task assignment for a specific task. + """ + collection = cls.get_collection() + try: + # Try with ObjectId first + task_assignment_data = collection.find_one({"task_id": ObjectId(task_id), "is_active": True}) + if not task_assignment_data: + # Try with string if ObjectId doesn't work + task_assignment_data = collection.find_one({"task_id": task_id, "is_active": True}) + + if task_assignment_data: + return TaskAssignmentModel(**task_assignment_data) + return None + except Exception: + return None + + @classmethod + def get_by_assignee_id(cls, assignee_id: str, user_type: str) -> List[TaskAssignmentModel]: + """ + Get all task assignments for a specific assignee (team or user). + """ + collection = cls.get_collection() + try: + # Try with ObjectId first + task_assignments_data = collection.find( + {"assignee_id": ObjectId(assignee_id), "user_type": user_type, "is_active": True} + ) + if not list(task_assignments_data): + # Try with string if ObjectId doesn't work + task_assignments_data = collection.find( + {"assignee_id": assignee_id, "user_type": user_type, "is_active": True} + ) + return [TaskAssignmentModel(**data) for data in task_assignments_data] + except Exception: + return [] + + @classmethod + def update_assignment( + cls, task_id: str, assignee_id: str, user_type: str, user_id: str + ) -> Optional[TaskAssignmentModel]: + """ + Update the assignment for a task. + """ + collection = cls.get_collection() + try: + current_assignment = cls.get_by_task_id(task_id) + + if not current_assignment: + raise TaskNotFoundException(task_id) + + team_id = None + + if user_type == "team": + team_id = assignee_id + elif user_type == "user" and current_assignment.team_id is not None: + team_id = current_assignment.team_id + + # Deactivate current assignment if exists (try both ObjectId and string) + collection.update_many( + {"task_id": ObjectId(task_id), "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": ObjectId(user_id), + "updated_at": datetime.now(timezone.utc), + } + }, + ) + # Also try with string + collection.update_many( + {"task_id": task_id, "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": ObjectId(user_id), + "updated_at": datetime.now(timezone.utc), + } + }, + ) + + # Sync deactivation to PostgreSQL + if current_assignment: + dual_write_service = EnhancedDualWriteService() + deactivation_data = { + "task_mongo_id": str(current_assignment.task_id), + "assignee_id": str(current_assignment.assignee_id), + "user_type": current_assignment.user_type, + "team_id": str(current_assignment.team_id) if current_assignment.team_id else None, + "is_active": False, + "created_at": current_assignment.created_at, + "updated_at": datetime.now(timezone.utc), + "created_by": str(current_assignment.created_by), + "updated_by": str(user_id), + } + + dual_write_success = dual_write_service.update_document( + collection_name="task_assignments", data=deactivation_data, mongo_id=str(current_assignment.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task assignment deactivation {current_assignment.id} to Postgres") + + new_assignment = TaskAssignmentModel( + _id=PyObjectId(), + task_id=PyObjectId(task_id), + assignee_id=PyObjectId(assignee_id), + user_type=user_type, + created_by=PyObjectId(user_id), + updated_by=None, + team_id=PyObjectId(team_id), + ) + + return cls.create(new_assignment) + except Exception: + return None + + @classmethod + def delete_assignment(cls, task_id: str, user_id: str) -> bool: + collection = cls.get_collection() + try: + # Get current assignment first + current_assignment = cls.get_by_task_id(task_id) + if not current_assignment: + return False + + # Try with ObjectId first + result = collection.update_one( + {"task_id": ObjectId(task_id), "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": ObjectId(user_id), + "updated_at": datetime.now(timezone.utc), + } + }, + ) + if result.modified_count == 0: + # Try with string if ObjectId doesn't work + result = collection.update_one( + {"task_id": task_id, "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": ObjectId(user_id), + "updated_at": datetime.now(timezone.utc), + } + }, + ) + + if result.modified_count > 0: + # Sync to PostgreSQL + dual_write_service = EnhancedDualWriteService() + assignment_data = { + "task_mongo_id": str(current_assignment.task_id), + "assignee_id": str(current_assignment.assignee_id), + "user_type": current_assignment.user_type, + "team_id": str(current_assignment.team_id) if current_assignment.team_id else None, + "is_active": False, + "created_at": current_assignment.created_at, + "updated_at": datetime.now(timezone.utc), + "created_by": str(current_assignment.created_by), + "updated_by": str(user_id), + } + + dual_write_success = dual_write_service.update_document( + collection_name="task_assignments", data=assignment_data, mongo_id=str(current_assignment.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task assignment deletion {current_assignment.id} to Postgres") + + return result.modified_count > 0 + except Exception: + return False + + @classmethod + def update_executor(cls, task_id: str, executor_id: str, user_id: str) -> bool: + collection = cls.get_collection() + try: + # Get current assignment first + current_assignment = cls.get_by_task_id(task_id) + if not current_assignment: + return False + + result = collection.update_one( + {"task_id": ObjectId(task_id), "is_active": True}, + { + "$set": { + "assignee_id": executor_id, + "user_type": "user", + "updated_by": user_id, + "updated_at": datetime.now(timezone.utc), + } + }, + ) + if result.modified_count == 0: + # Try with string if ObjectId doesn't work + result = collection.update_one( + {"task_id": task_id, "is_active": True}, + { + "$set": { + "assignee_id": executor_id, + "user_type": "user", + "updated_by": user_id, + "updated_at": datetime.now(timezone.utc), + } + }, + ) + + if result.modified_count > 0: + # Sync to PostgreSQL + dual_write_service = EnhancedDualWriteService() + assignment_data = { + "task_mongo_id": str(current_assignment.task_id), + "assignee_id": str(executor_id), + "user_type": "user", + "team_id": str(current_assignment.team_id) if current_assignment.team_id else None, + "is_active": current_assignment.is_active, + "created_at": current_assignment.created_at, + "updated_at": datetime.now(timezone.utc), + "created_by": str(current_assignment.created_by), + "updated_by": str(user_id), + } + + dual_write_success = dual_write_service.update_document( + collection_name="task_assignments", data=assignment_data, mongo_id=str(current_assignment.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task assignment update {current_assignment.id} to Postgres") + + return result.modified_count > 0 + except Exception: + return False + + @classmethod + def deactivate_by_task_id(cls, task_id: str, user_id: str) -> bool: + collection = cls.get_collection() + try: + # Get all active assignments for this task + active_assignments = cls.get_by_task_id(task_id) + if not active_assignments: + return False + + # Try with ObjectId first + result = collection.update_many( + {"task_id": ObjectId(task_id), "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": ObjectId(user_id), + "updated_at": datetime.now(timezone.utc), + } + }, + ) + if result.modified_count == 0: + # Try with string if ObjectId doesn't work + result = collection.update_many( + {"task_id": task_id, "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": ObjectId(user_id), + "updated_at": datetime.now(timezone.utc), + } + }, + ) + + if result.modified_count > 0: + # Sync to PostgreSQL for each assignment + dual_write_service = EnhancedDualWriteService() + assignment_data = { + "task_mongo_id": str(active_assignments.task_id), + "assignee_id": str(active_assignments.assignee_id), + "user_type": active_assignments.user_type, + "team_id": str(active_assignments.team_id) if active_assignments.team_id else None, + "is_active": False, + "created_at": active_assignments.created_at, + "updated_at": datetime.now(timezone.utc), + "created_by": str(active_assignments.created_by), + "updated_by": str(user_id), + } + + dual_write_success = dual_write_service.update_document( + collection_name="task_assignments", data=assignment_data, mongo_id=str(active_assignments.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task assignment deactivation {active_assignments.id} to Postgres") + + return result.modified_count > 0 + except Exception: + return False diff --git a/todo/repositories/task_repository.py b/todo/repositories/task_repository.py index 38669196..038d6540 100644 --- a/todo/repositories/task_repository.py +++ b/todo/repositories/task_repository.py @@ -1,24 +1,166 @@ from datetime import datetime, timezone from typing import List +from bson import ObjectId +from pymongo import ReturnDocument +from todo.exceptions.task_exceptions import TaskNotFoundException from todo.models.task import TaskModel from todo.repositories.common.mongo_repository import MongoRepository -from todo.constants.messages import RepositoryErrors +from todo.repositories.task_assignment_repository import TaskAssignmentRepository +from todo.constants.messages import ApiErrors, RepositoryErrors +from todo.constants.task import ( + SORT_FIELD_PRIORITY, + SORT_FIELD_ASSIGNEE, + SORT_FIELD_UPDATED_AT, + SORT_ORDER_DESC, + TaskStatus, +) +from todo.repositories.team_repository import UserTeamDetailsRepository +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService +from todo.models.postgres import PostgresTask, PostgresDeferredDetails class TaskRepository(MongoRepository): collection_name = TaskModel.collection_name @classmethod - def list(cls, page: int, limit: int) -> List[TaskModel]: + def _get_team_task_ids(cls, team_id: str) -> List[ObjectId]: + team_tasks = TaskAssignmentRepository.get_collection().find({"team_id": team_id, "is_active": True}) + team_task_ids = [ObjectId(task["task_id"]) for task in team_tasks] + return list(set(team_task_ids)) + + @classmethod + def _build_status_filter(cls, status_filter: str = None) -> dict: + now = datetime.now(timezone.utc) + + if status_filter == TaskStatus.DEFERRED.value: + return { + "$and": [ + {"deferredDetails": {"$ne": None}}, + {"deferredDetails.deferredTill": {"$gt": now}}, + ] + } + + elif status_filter == TaskStatus.DONE.value: + return { + "$or": [ + {"deferredDetails": None}, + {"deferredDetails.deferredTill": {"$lt": now}}, + ] + } + + else: + return { + "$and": [ + {"status": {"$ne": TaskStatus.DONE.value}}, + { + "$or": [ + {"deferredDetails": None}, + {"deferredDetails.deferredTill": {"$lt": now}}, + ] + }, + ] + } + + @classmethod + def list( + cls, + page: int, + limit: int, + sort_by: str, + order: str, + user_id: str = None, + team_id: str = None, + status_filter: str = None, + ) -> List[TaskModel]: tasks_collection = cls.get_collection() - tasks_cursor = tasks_collection.find().skip((page - 1) * limit).limit(limit) + + base_filter = cls._build_status_filter(status_filter) + + if team_id: + all_team_task_ids = cls._get_team_task_ids(team_id) + query_filter = {"$and": [base_filter, {"_id": {"$in": all_team_task_ids}}]} + elif user_id: + assigned_task_ids = cls._get_assigned_task_ids_for_user(user_id) + query_filter = {"$and": [base_filter, {"_id": {"$in": assigned_task_ids}}]} + else: + query_filter = base_filter + + if sort_by == SORT_FIELD_UPDATED_AT: + sort_direction = -1 if order == SORT_ORDER_DESC else 1 + pipeline = [ + {"$match": query_filter}, + {"$addFields": {"lastActivity": {"$ifNull": [{"$toDate": "$updatedAt"}, {"$toDate": "$createdAt"}]}}}, + {"$sort": {"lastActivity": sort_direction}}, + {"$skip": (page - 1) * limit}, + {"$limit": limit}, + {"$project": {"lastActivity": 0}}, + ] + tasks_cursor = tasks_collection.aggregate(pipeline) + return [TaskModel(**task) for task in tasks_cursor] + + if sort_by == SORT_FIELD_PRIORITY: + sort_direction = 1 if order == SORT_ORDER_DESC else -1 + sort_criteria = [(sort_by, sort_direction)] + elif sort_by == SORT_FIELD_ASSIGNEE: + # Assignee sorting is no longer supported since assignee is in separate collection + sort_direction = -1 if order == SORT_ORDER_DESC else 1 + sort_criteria = [("createdAt", sort_direction)] + else: + sort_direction = -1 if order == SORT_ORDER_DESC else 1 + sort_criteria = [(sort_by, sort_direction)] + + tasks_cursor = tasks_collection.find(query_filter).sort(sort_criteria).skip((page - 1) * limit).limit(limit) return [TaskModel(**task) for task in tasks_cursor] @classmethod - def count(cls) -> int: + def _get_assigned_task_ids_for_user(cls, user_id: str) -> List[ObjectId]: + """Get task IDs where user is assigned (either directly or as team member).""" + direct_assignments = TaskAssignmentRepository.get_by_assignee_id(user_id, "user") + direct_task_ids = [assignment.task_id for assignment in direct_assignments] + + # Get teams where user is a member + from todo.repositories.team_repository import TeamRepository + + user_teams = UserTeamDetailsRepository.get_by_user_id(user_id) + team_ids = [str(team.team_id) for team in user_teams] + + # Get tasks assigned to those teams (only if user is POC) + team_task_ids = [] + if team_ids: + # Get teams where user is POC + poc_teams = TeamRepository.get_collection().find( + {"_id": {"$in": [ObjectId(team_id) for team_id in team_ids]}, "is_deleted": False, "poc_id": user_id} + ) + poc_team_ids = [str(team["_id"]) for team in poc_teams] + + # Get team assignments for POC teams + if poc_team_ids: + team_assignments = TaskAssignmentRepository.get_collection().find( + {"assignee_id": {"$in": poc_team_ids}, "user_type": "team", "is_active": True} + ) + team_task_ids = [ObjectId(assignment["task_id"]) for assignment in team_assignments] + + return direct_task_ids + team_task_ids + + @classmethod + def count(cls, user_id: str = None, team_id: str = None, status_filter: str = None) -> int: tasks_collection = cls.get_collection() - return tasks_collection.count_documents({}) + + base_filter = cls._build_status_filter(status_filter) + + if team_id: + all_team_task_ids = cls._get_team_task_ids(team_id) + query_filter = {"$and": [base_filter, {"_id": {"$in": all_team_task_ids}}]} + + elif user_id: + assigned_task_ids = cls._get_assigned_task_ids_for_user(user_id) + query_filter = { + "$and": [base_filter, {"$or": [{"createdBy": user_id}, {"_id": {"$in": assigned_task_ids}}]}] + } + else: + query_filter = base_filter + return tasks_collection.count_documents(query_filter) @classmethod def get_all(cls) -> List[TaskModel]: @@ -30,6 +172,7 @@ def get_all(cls) -> List[TaskModel]: """ tasks_collection = cls.get_collection() tasks_cursor = tasks_collection.find() + return [TaskModel(**task) for task in tasks_cursor] @classmethod @@ -65,11 +208,194 @@ def create(cls, task: TaskModel) -> TaskModel: task.createdAt = datetime.now(timezone.utc) task.updatedAt = None + # Ensure createdAt is properly set + if not task.createdAt: + task.createdAt = datetime.now(timezone.utc) + task_dict = task.model_dump(mode="json", by_alias=True, exclude_none=True) insert_result = tasks_collection.insert_one(task_dict, session=session) task.id = insert_result.inserted_id + + dual_write_service = EnhancedDualWriteService() + + task_data = { + "title": task.title, + "description": task.description, + "priority": task.priority, + "status": task.status, + "displayId": task.displayId, + "isAcknowledged": task.isAcknowledged, + "isDeleted": task.isDeleted, + "startedAt": task.startedAt, + "dueAt": task.dueAt, + "createdAt": task.createdAt or datetime.now(timezone.utc), + "updatedAt": task.updatedAt, + "createdBy": str(task.createdBy), + "updatedBy": str(task.updatedBy) if task.updatedBy else None, + } + + dual_write_success = dual_write_service.create_document( + collection_name="tasks", data=task_data, mongo_id=str(task.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task {task.id} to Postgres") + return task except Exception as e: raise ValueError(RepositoryErrors.TASK_CREATION_FAILED.format(str(e))) + + @classmethod + def get_by_id(cls, task_id: str) -> TaskModel | None: + tasks_collection = cls.get_collection() + task_data = tasks_collection.find_one({"_id": ObjectId(task_id)}) + if task_data: + return TaskModel(**task_data) + return None + + @classmethod + def delete_by_id(cls, task_id: ObjectId, user_id: str) -> TaskModel | None: + tasks_collection = cls.get_collection() + + task = tasks_collection.find_one({"_id": task_id, "isDeleted": False}) + if not task: + raise TaskNotFoundException(task_id) + + # Check if user is the creator + if user_id != task.get("createdBy"): + # Check if user is assigned to this task + assigned_task_ids = cls._get_assigned_task_ids_for_user(user_id) + if task_id not in assigned_task_ids: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + + # Deactivate assignee relationship for this task + TaskAssignmentRepository.deactivate_by_task_id(str(task_id), user_id) + + deleted_task_data = tasks_collection.find_one_and_update( + {"_id": task_id}, + { + "$set": { + "isDeleted": True, + "updatedAt": datetime.now(timezone.utc), + "updatedBy": user_id, + } + }, + return_document=ReturnDocument.AFTER, + ) + + if deleted_task_data: + return TaskModel(**deleted_task_data) + return None + + @classmethod + def update(cls, task_id: str, update_data: dict) -> TaskModel | None: + if not isinstance(update_data, dict): + raise ValueError("update_data must be a dictionary.") + + try: + obj_id = ObjectId(task_id) + except Exception: + return None + + update_data_with_timestamp = {**update_data, "updatedAt": datetime.now(timezone.utc)} + update_data_with_timestamp.pop("_id", None) + update_data_with_timestamp.pop("id", None) + + tasks_collection = cls.get_collection() + + updated_task_doc = tasks_collection.find_one_and_update( + {"_id": obj_id}, {"$set": update_data_with_timestamp}, return_document=ReturnDocument.AFTER + ) + + if updated_task_doc: + task_model = TaskModel(**updated_task_doc) + + dual_write_service = EnhancedDualWriteService() + task_data = { + "title": task_model.title, + "description": task_model.description, + "priority": task_model.priority, + "status": task_model.status, + "displayId": task_model.displayId, + "isAcknowledged": task_model.isAcknowledged, + "isDeleted": task_model.isDeleted, + "startedAt": task_model.startedAt, + "dueAt": task_model.dueAt, + "createdAt": task_model.createdAt, + "updatedAt": task_model.updatedAt, + "createdBy": str(task_model.createdBy), + "updatedBy": str(task_model.updatedBy) if task_model.updatedBy else None, + } + + dual_write_success = dual_write_service.update_document( + collection_name="tasks", data=task_data, mongo_id=str(task_model.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync task update {task_model.id} to Postgres") + + # Handle deferred details if present in update_data + if "deferredDetails" in update_data: + cls._handle_deferred_details_sync(task_id, update_data["deferredDetails"]) + + return task_model + return None + + @classmethod + def get_tasks_for_user(cls, user_id: str, page: int, limit: int, status_filter: str = None) -> List[TaskModel]: + tasks_collection = cls.get_collection() + assigned_task_ids = cls._get_assigned_task_ids_for_user(user_id) + + base_filter = cls._build_status_filter(status_filter) + + query = {"$and": [base_filter, {"_id": {"$in": assigned_task_ids}}]} + tasks_cursor = tasks_collection.find(query).skip((page - 1) * limit).limit(limit) + return [TaskModel(**task) for task in tasks_cursor] + + @classmethod + def get_by_ids(cls, task_ids: List[str]) -> List[TaskModel]: + """ + Get multiple tasks by their IDs in a single database query. + Returns only the tasks that exist. + """ + if not task_ids: + return [] + tasks_collection = cls.get_collection() + object_ids = [ObjectId(task_id) for task_id in task_ids] + cursor = tasks_collection.find({"_id": {"$in": object_ids}}) + return [TaskModel(**doc) for doc in cursor] + + @classmethod + def _handle_deferred_details_sync(cls, task_id: str, deferred_details: dict) -> None: + """Handle deferred details synchronization to PostgreSQL""" + try: + postgres_task = PostgresTask.objects.get(mongo_id=task_id) + + if deferred_details: + deferred_details_data = { + "task": postgres_task, + "deferred_at": deferred_details.get("deferredAt"), + "deferred_till": deferred_details.get("deferredTill"), + "deferred_by": str(deferred_details.get("deferredBy")), + } + + PostgresDeferredDetails.objects.update_or_create(task=postgres_task, defaults=deferred_details_data) + else: + # Remove deferred details if None + PostgresDeferredDetails.objects.filter(task=postgres_task).delete() + + except PostgresTask.DoesNotExist: + pass + except Exception as e: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync deferred details to PostgreSQL for task {task_id}: {str(e)}") diff --git a/todo/repositories/team_creation_invite_code_repository.py b/todo/repositories/team_creation_invite_code_repository.py new file mode 100644 index 00000000..09f3dea2 --- /dev/null +++ b/todo/repositories/team_creation_invite_code_repository.py @@ -0,0 +1,136 @@ +from typing import Optional, List +from datetime import datetime, timezone + +from todo.repositories.common.mongo_repository import MongoRepository +from todo.models.team_creation_invite_code import TeamCreationInviteCodeModel +from todo.repositories.user_repository import UserRepository +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +class TeamCreationInviteCodeRepository(MongoRepository): + """Repository for team creation invite code operations.""" + + collection_name = TeamCreationInviteCodeModel.collection_name + + @classmethod + def is_code_valid(cls, code: str) -> Optional[dict]: + """Check if a code is available for use (unused).""" + collection = cls.get_collection() + try: + code_data = collection.find_one({"code": code, "is_used": False}) + return code_data + except Exception as e: + raise Exception(f"Error checking if code is valid: {e}") + + @classmethod + def validate_and_consume_code(cls, code: str, used_by: str) -> Optional[dict]: + """Validate and consume a code in one atomic operation using findOneAndUpdate.""" + collection = cls.get_collection() + try: + current_time = datetime.now(timezone.utc) + result = collection.find_one_and_update( + {"code": code, "is_used": False}, + {"$set": {"is_used": True, "used_by": used_by, "used_at": current_time.isoformat()}}, + return_document=True, + ) + + if result: + # Sync the update to PostgreSQL + dual_write_service = EnhancedDualWriteService() + invite_code_data = { + "code": result["code"], + "description": result.get("description"), + "is_used": True, + "created_by": str(result["created_by"]), + "used_by": str(used_by), + "created_at": result.get("created_at"), + "used_at": current_time, + } + + dual_write_success = dual_write_service.update_document( + collection_name="team_creation_invite_codes", data=invite_code_data, mongo_id=str(result["_id"]) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync team creation invite code update {result['_id']} to Postgres") + + return result + except Exception as e: + raise Exception(f"Error validating and consuming code: {e}") + + @classmethod + def create(cls, team_invite_code: TeamCreationInviteCodeModel) -> TeamCreationInviteCodeModel: + collection = cls.get_collection() + team_invite_code.created_at = datetime.now(timezone.utc) + + code_dict = team_invite_code.model_dump(mode="json", by_alias=True, exclude_none=True) + insert_result = collection.insert_one(code_dict) + team_invite_code.id = insert_result.inserted_id + + dual_write_service = EnhancedDualWriteService() + invite_code_data = { + "code": team_invite_code.code, + "description": team_invite_code.description, + "is_used": team_invite_code.is_used, + "created_by": str(team_invite_code.created_by), + "used_by": str(team_invite_code.used_by) if team_invite_code.used_by else None, + "created_at": team_invite_code.created_at, + "used_at": team_invite_code.used_at, + } + + dual_write_success = dual_write_service.create_document( + collection_name="team_creation_invite_codes", data=invite_code_data, mongo_id=str(team_invite_code.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync team creation invite code {team_invite_code.id} to Postgres") + + return team_invite_code + + @classmethod + def get_all_codes(cls, page: int = 1, limit: int = 10) -> tuple[List[dict], int]: + """Get paginated team creation invite codes with user details for created_by and used_by.""" + collection = cls.get_collection() + try: + skip = (page - 1) * limit + + total_count = collection.count_documents({}) + + codes = list(collection.find().sort("created_at", -1).skip(skip).limit(limit)) + + enhanced_codes = [] + for code in codes: + created_by_user = None + used_by_user = None + + if code.get("created_by"): + user = UserRepository.get_by_id(str(code["created_by"])) + if user: + created_by_user = {"id": str(user.id), "name": user.name} + + if code.get("used_by"): + user = UserRepository.get_by_id(str(code["used_by"])) + if user: + used_by_user = {"id": str(user.id), "name": user.name} + + enhanced_code = { + "id": str(code["_id"]), + "code": code["code"], + "description": code.get("description"), + "created_at": code.get("created_at"), + "used_at": code.get("used_at"), + "is_used": code.get("is_used", False), + "created_by": created_by_user or {}, + "used_by": used_by_user, + } + enhanced_codes.append(enhanced_code) + + return enhanced_codes, total_count + except Exception as e: + raise Exception(f"Error getting all codes with user details: {e}") diff --git a/todo/repositories/team_repository.py b/todo/repositories/team_repository.py new file mode 100644 index 00000000..31769287 --- /dev/null +++ b/todo/repositories/team_repository.py @@ -0,0 +1,387 @@ +from datetime import datetime, timezone +from typing import Optional +from bson import ObjectId +from pymongo import ReturnDocument + +from todo.models.team import TeamModel, UserTeamDetailsModel +from todo.repositories.common.mongo_repository import MongoRepository +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +class TeamRepository(MongoRepository): + collection_name = TeamModel.collection_name + + @classmethod + def create(cls, team: TeamModel) -> TeamModel: + """ + Creates a new team in the repository. + """ + teams_collection = cls.get_collection() + team.created_at = datetime.now(timezone.utc) + team.updated_at = datetime.now(timezone.utc) + + team_dict = team.model_dump(mode="json", by_alias=True, exclude_none=True) + insert_result = teams_collection.insert_one(team_dict) + team.id = insert_result.inserted_id + + dual_write_service = EnhancedDualWriteService() + team_data = { + "name": team.name, + "description": team.description, + "invite_code": team.invite_code, + "poc_id": str(team.poc_id) if team.poc_id else None, + "created_by": str(team.created_by), + "updated_by": str(team.updated_by), + "is_deleted": team.is_deleted, + "created_at": team.created_at, + "updated_at": team.updated_at, + } + + dual_write_success = dual_write_service.create_document( + collection_name="teams", data=team_data, mongo_id=str(team.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync team {team.id} to Postgres") + + return team + + @classmethod + def get_by_id(cls, team_id: str) -> Optional[TeamModel]: + """ + Get a team by its ID. + """ + teams_collection = cls.get_collection() + try: + team_data = teams_collection.find_one({"_id": ObjectId(team_id), "is_deleted": False}) + if team_data: + return TeamModel(**team_data) + return None + except Exception: + return None + + @classmethod + def get_by_invite_code(cls, invite_code: str) -> Optional[TeamModel]: + """ + Get a team by its invite code. + """ + teams_collection = cls.get_collection() + try: + team_data = teams_collection.find_one({"invite_code": invite_code, "is_deleted": False}) + if team_data: + return TeamModel(**team_data) + return None + except Exception: + return None + + @classmethod + def update(cls, team_id: str, update_data: dict, updated_by_user_id: str) -> Optional[TeamModel]: + """ + Update a team by its ID using atomic operation to prevent race conditions. + """ + teams_collection = cls.get_collection() + try: + # Add updated_by and updated_at fields + update_data["updated_by"] = updated_by_user_id + update_data["updated_at"] = datetime.now(timezone.utc) + + # Remove None values to avoid overwriting with None + update_data = {k: v for k, v in update_data.items() if v is not None} + + # Use find_one_and_update for atomicity - prevents race conditions + updated_doc = teams_collection.find_one_and_update( + {"_id": ObjectId(team_id), "is_deleted": False}, + {"$set": update_data}, + return_document=ReturnDocument.AFTER, + ) + + if updated_doc: + return TeamModel(**updated_doc) + return None + except Exception: + return None + + @classmethod + def is_user_spoc(cls, team_id: str, user_id: str) -> bool: + """ + Check if the given user is the SPOC (poc_id) for the given team. + """ + team = cls.get_by_id(team_id) + if not team or not team.poc_id: + return False + return str(team.poc_id) == str(user_id) + + @classmethod + def is_user_team_member(cls, team_id: str, user_id: str) -> bool: + """ + Check if the given user is a member of the given team. + """ + team_members = UserTeamDetailsRepository.get_users_by_team_id(team_id) + return user_id in team_members + + +class UserTeamDetailsRepository(MongoRepository): + collection_name = UserTeamDetailsModel.collection_name + + @classmethod + def create(cls, user_team: UserTeamDetailsModel) -> UserTeamDetailsModel: + """ + Creates a new user-team relationship. + """ + collection = cls.get_collection() + user_team.created_at = datetime.now(timezone.utc) + user_team.updated_at = datetime.now(timezone.utc) + + user_team_dict = user_team.model_dump(mode="json", by_alias=True, exclude_none=True) + insert_result = collection.insert_one(user_team_dict) + user_team.id = insert_result.inserted_id + + dual_write_service = EnhancedDualWriteService() + user_team_data = { + "user_id": str(user_team.user_id), + "team_id": str(user_team.team_id), + "created_by": str(user_team.created_by), + "updated_by": str(user_team.updated_by), + "is_active": user_team.is_active, + "created_at": user_team.created_at, + "updated_at": user_team.updated_at, + } + + dual_write_success = dual_write_service.create_document( + collection_name="user_team_details", data=user_team_data, mongo_id=str(user_team.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user team details {user_team.id} to Postgres") + + return user_team + + @classmethod + def create_many(cls, user_teams: list[UserTeamDetailsModel]) -> list[UserTeamDetailsModel]: + """ + Creates multiple user-team relationships. + """ + collection = cls.get_collection() + current_time = datetime.now(timezone.utc) + + for user_team in user_teams: + user_team.created_at = current_time + user_team.updated_at = current_time + + user_teams_dicts = [ + user_team.model_dump(mode="json", by_alias=True, exclude_none=True) for user_team in user_teams + ] + insert_result = collection.insert_many(user_teams_dicts) + + # Set the inserted IDs + for i, user_team in enumerate(user_teams): + user_team.id = insert_result.inserted_ids[i] + + dual_write_service = EnhancedDualWriteService() + for user_team in user_teams: + user_team_data = { + "user_id": str(user_team.user_id), + "team_id": str(user_team.team_id), + "created_by": str(user_team.created_by), + "updated_by": str(user_team.updated_by), + "is_active": user_team.is_active, + "created_at": user_team.created_at, + "updated_at": user_team.updated_at, + } + + dual_write_success = dual_write_service.create_document( + collection_name="user_team_details", data=user_team_data, mongo_id=str(user_team.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user team details {user_team.id} to Postgres") + + return user_teams + + @classmethod + def get_by_user_id(cls, user_id: str) -> list[UserTeamDetailsModel]: + """ + Get all team relationships for a specific user. + """ + collection = cls.get_collection() + try: + user_teams_data = collection.find({"user_id": user_id, "is_active": True}) + return [UserTeamDetailsModel(**data) for data in user_teams_data] + except Exception: + return [] + + @classmethod + def get_users_by_team_id(cls, team_id: str) -> list[str]: + """ + Get all user IDs for a specific team. + """ + collection = cls.get_collection() + try: + user_teams_data = list(collection.find({"team_id": team_id, "is_active": True})) + return [data["user_id"] for data in user_teams_data] + except Exception: + return [] + + @classmethod + def get_user_infos_by_team_id(cls, team_id: str) -> list[dict]: + """ + Get all user info (user_id, name, email) for a specific team. + """ + from todo.repositories.user_repository import UserRepository + + user_ids = cls.get_users_by_team_id(team_id) + user_infos = [] + for user_id in user_ids: + user = UserRepository.get_by_id(user_id) + if user: + user_infos.append({"user_id": user_id, "name": user.name, "email": user.email_id}) + return user_infos + + @classmethod + def get_users_and_added_on_by_team_id(cls, team_id: str) -> list[dict]: + """ + Get all user IDs and their addedOn (created_at) for a specific team. + """ + collection = cls.get_collection() + try: + user_teams_data = list(collection.find({"team_id": team_id, "is_active": True})) + return [{"user_id": data["user_id"], "added_on": data.get("created_at")} for data in user_teams_data] + except Exception: + return [] + + @classmethod + def get_by_team_id(cls, team_id: str) -> list[UserTeamDetailsModel]: + """ + Get all user-team relationships for a specific team. + """ + collection = cls.get_collection() + try: + user_teams_data = collection.find({"team_id": team_id, "is_active": True}) + return [UserTeamDetailsModel(**data) for data in user_teams_data] + except Exception: + return [] + + @classmethod + def remove_user_from_team(cls, team_id: str, user_id: str, updated_by_user_id: str) -> bool: + """ + Remove a user from a team by setting is_active to False. + """ + collection = cls.get_collection() + try: + current_relationship = collection.find_one({"team_id": team_id, "user_id": user_id, "is_active": True}) + if not current_relationship: + return False + + result = collection.update_one( + {"team_id": team_id, "user_id": user_id, "is_active": True}, + { + "$set": { + "is_active": False, + "updated_by": updated_by_user_id, + "updated_at": datetime.now(timezone.utc), + } + }, + ) + + if result.modified_count > 0: + dual_write_service = EnhancedDualWriteService() + user_team_data = { + "user_id": str(current_relationship["user_id"]), + "team_id": str(current_relationship["team_id"]), + "is_active": False, + "created_by": str(current_relationship["created_by"]), + "updated_by": str(updated_by_user_id), + "created_at": current_relationship["created_at"], + "updated_at": datetime.now(timezone.utc), + } + + dual_write_success = dual_write_service.update_document( + collection_name="user_team_details", data=user_team_data, mongo_id=str(current_relationship["_id"]) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user team removal {current_relationship['_id']} to Postgres") + + return result.modified_count > 0 + except Exception: + return False + + @classmethod + def add_user_to_team( + cls, team_id: str, user_id: str, role_id: str, created_by_user_id: str + ) -> UserTeamDetailsModel: + """ + Add a user to a team. + """ + collection = cls.get_collection() + # Check if user is already in the team + existing_relationship = collection.find_one({"team_id": team_id, "user_id": user_id}) + + if existing_relationship: + # If user exists but is inactive, reactivate them + if not existing_relationship.get("is_active", True): + collection.update_one( + {"_id": existing_relationship["_id"]}, + { + "$set": { + "is_active": True, + "role_id": role_id, + "updated_by": created_by_user_id, + "updated_at": datetime.now(timezone.utc), + } + }, + ) + return UserTeamDetailsModel(**existing_relationship) + else: + # User is already active in the team + return UserTeamDetailsModel(**existing_relationship) + + # Create new relationship + user_team = UserTeamDetailsModel( + user_id=user_id, + team_id=team_id, + role_id=role_id, + is_active=True, + created_by=created_by_user_id, + updated_by=created_by_user_id, + ) + return cls.create(user_team) + + @classmethod + def update_team_members(cls, team_id: str, member_ids: list[str], updated_by_user_id: str) -> bool: + """ + Update team members by replacing the current members with the new list. + """ + try: + # Get current team members + current_members = cls.get_users_by_team_id(team_id) + + # Find members to remove (in current but not in new list) + members_to_remove = [user_id for user_id in current_members if user_id not in member_ids] + + # Find members to add (in new list but not in current) + members_to_add = [user_id for user_id in member_ids if user_id not in current_members] + + # Remove members + for user_id in members_to_remove: + cls.remove_user_from_team(team_id, user_id, updated_by_user_id) + + # Add new members + for user_id in members_to_add: + cls.add_user_to_team(team_id, user_id, "1", updated_by_user_id) # Default role_id is "1" + + return True + except Exception: + return False diff --git a/todo/repositories/user_repository.py b/todo/repositories/user_repository.py new file mode 100644 index 00000000..a4e2ffe0 --- /dev/null +++ b/todo/repositories/user_repository.py @@ -0,0 +1,125 @@ +from datetime import datetime, timezone +from typing import Optional, List +from pymongo.collection import ReturnDocument +from pymongo import ASCENDING + +from todo.models.user import UserModel +from todo.models.common.pyobjectid import PyObjectId +from todo_project.db.config import DatabaseManager +from todo.constants.messages import RepositoryErrors +from todo.exceptions.auth_exceptions import UserNotFoundException, APIException +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +class UserRepository: + @classmethod + def _get_collection(cls): + return DatabaseManager().get_collection("users") + + @classmethod + def get_by_id(cls, user_id: str) -> Optional[UserModel]: + try: + collection = cls._get_collection() + object_id = PyObjectId(user_id) + doc = collection.find_one({"_id": object_id}) + return UserModel(**doc) if doc else None + except Exception as e: + raise UserNotFoundException() from e + + @classmethod + def get_by_ids(cls, user_ids: List[str]) -> List[UserModel]: + """ + Get multiple users by their IDs in a single database query. + Returns only the users that exist. + """ + try: + if not user_ids: + return [] + + collection = cls._get_collection() + object_ids = [PyObjectId(user_id) for user_id in user_ids] + cursor = collection.find({"_id": {"$in": object_ids}}) + return [UserModel(**doc) for doc in cursor] + except Exception as e: + raise UserNotFoundException() from e + + @classmethod + def create_or_update(cls, user_data: dict) -> UserModel: + try: + collection = cls._get_collection() + now = datetime.now(timezone.utc) + google_id = user_data["google_id"] + + result = collection.find_one_and_update( + {"google_id": google_id}, + { + "$set": { + "email_id": user_data["email"], + "name": user_data["name"], + "picture": user_data.get("picture"), + "updated_at": now, + }, + "$setOnInsert": {"google_id": google_id, "created_at": now}, + }, + upsert=True, + return_document=ReturnDocument.AFTER, + ) + + if not result: + raise APIException(RepositoryErrors.USER_OPERATION_FAILED) + + user_model = UserModel(**result) + + dual_write_service = EnhancedDualWriteService() + user_data_for_postgres = { + "name": user_model.name, + "email_id": user_model.email_id, + "google_id": user_model.google_id, + "picture": user_model.picture, + "created_at": user_model.created_at, + "updated_at": user_model.updated_at, + } + + dual_write_success = dual_write_service.create_document( + collection_name="users", data=user_data_for_postgres, mongo_id=str(user_model.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user {user_model.id} to Postgres") + + return user_model + + except Exception as e: + if isinstance(e, APIException): + raise + raise APIException(RepositoryErrors.USER_CREATE_UPDATE_FAILED.format(str(e))) + + @classmethod + def search_users(cls, query: str, page: int = 1, limit: int = 10) -> tuple[List[UserModel], int]: + """ + Search users by name or email using fuzzy search with MongoDB regex + """ + + collection = cls._get_collection() + regex_pattern = {"$regex": query, "$options": "i"} + search_filter = {"$or": [{"name": regex_pattern}, {"email_id": regex_pattern}]} + skip = (page - 1) * limit + total_count = collection.count_documents(search_filter) + cursor = collection.find(search_filter).sort("name", ASCENDING).skip(skip).limit(limit) + users = [UserModel(**doc) for doc in cursor] + return users, total_count + + @classmethod + def get_all_users(cls, page: int = 1, limit: int = 10) -> tuple[List[UserModel], int]: + """ + Get all users with pagination + """ + collection = cls._get_collection() + skip = (page - 1) * limit + total_count = collection.count_documents({}) + cursor = collection.find().sort("name", ASCENDING).skip(skip).limit(limit) + users = [UserModel(**doc) for doc in cursor] + return users, total_count diff --git a/todo/repositories/user_role_repository.py b/todo/repositories/user_role_repository.py new file mode 100644 index 00000000..c5c93afc --- /dev/null +++ b/todo/repositories/user_role_repository.py @@ -0,0 +1,165 @@ +from datetime import datetime, timezone +from typing import List, Optional +import logging +from bson import ObjectId + +from todo.models.user_role import UserRoleModel +from todo.repositories.common.mongo_repository import MongoRepository +from todo.constants.role import RoleScope, RoleName +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + +logger = logging.getLogger(__name__) + + +class UserRoleRepository(MongoRepository): + collection_name = UserRoleModel.collection_name + + @classmethod + def create(cls, user_role: UserRoleModel) -> UserRoleModel: + collection = cls.get_collection() + + role_name_value = user_role.role_name.value if hasattr(user_role.role_name, "value") else user_role.role_name + scope_value = user_role.scope.value if hasattr(user_role.scope, "value") else user_role.scope + + # Check if already exists and is active + existing = collection.find_one( + { + "user_id": user_role.user_id, + "role_name": role_name_value, + "scope": scope_value, + "team_id": user_role.team_id, + "is_active": True, + } + ) + + if existing: + return UserRoleModel(**existing) + + user_role.created_at = datetime.now(timezone.utc) + user_role_dict = user_role.model_dump(mode="json", by_alias=True, exclude_none=True) + result = collection.insert_one(user_role_dict) + user_role.id = result.inserted_id + + dual_write_service = EnhancedDualWriteService() + user_role_data = { + "user_id": user_role.user_id, + "role_name": user_role.role_name.value if hasattr(user_role.role_name, "value") else user_role.role_name, + "scope": user_role.scope.value if hasattr(user_role.scope, "value") else user_role.scope, + "team_id": user_role.team_id, + "is_active": user_role.is_active, + "created_at": user_role.created_at, + "created_by": user_role.created_by, + } + + dual_write_success = dual_write_service.create_document( + collection_name="user_roles", data=user_role_data, mongo_id=str(user_role.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user role {user_role.id} to Postgres") + + return user_role + + @classmethod + def get_user_roles( + cls, user_id: Optional[str] = None, scope: Optional["RoleScope"] = None, team_id: Optional[str] = None + ) -> List[UserRoleModel]: + collection = cls.get_collection() + + query = {"is_active": True} + + if user_id: + query["user_id"] = user_id + + if scope: + scope_value = scope.value if hasattr(scope, "value") else scope + query["scope"] = scope_value + + if team_id: + query["team_id"] = team_id + elif scope and (scope.value if hasattr(scope, "value") else scope) == "GLOBAL": + query["team_id"] = None + + roles = [] + for doc in collection.find(query): + roles.append(UserRoleModel(**doc)) + return roles + + @classmethod + def get_by_user_role_scope_team(cls, user_id: str, role_id: str, scope: str, team_id: Optional[str] = None): + collection = cls.get_collection() + + try: + object_id = ObjectId(role_id) + except Exception: + return None + + query = {"_id": object_id, "user_id": user_id, "scope": scope, "is_active": True} + + if scope == "TEAM" and team_id: + query["team_id"] = team_id + elif scope == "GLOBAL": + query["team_id"] = None + + result = collection.find_one(query) + if result: + return UserRoleModel(**result) + return None + + @classmethod + def assign_role( + cls, user_id: str, role_name: "RoleName", scope: "RoleScope", team_id: Optional[str] = None + ) -> UserRoleModel: + """Assign a role to a user - simple and clean.""" + user_role = UserRoleModel(user_id=user_id, role_name=role_name, scope=scope, team_id=team_id, is_active=True) + return cls.create(user_role) + + @classmethod + def remove_role_by_id(cls, user_id: str, role_id: str, scope: str, team_id: Optional[str] = None) -> bool: + """Remove a role from a user by role_id - simple deactivation.""" + collection = cls.get_collection() + + try: + object_id = ObjectId(role_id) + except Exception: + return False + + query = {"_id": object_id, "user_id": user_id, "scope": scope, "is_active": True} + + if scope == "TEAM" and team_id: + query["team_id"] = team_id + elif scope == "GLOBAL": + query["team_id"] = None + + current_role = collection.find_one(query) + if not current_role: + return False + + result = collection.update_one(query, {"$set": {"is_active": False}}) + + if result.modified_count > 0: + dual_write_service = EnhancedDualWriteService() + user_role_data = { + "user_id": str(current_role["user_id"]), + "role_name": current_role["role_name"], + "scope": current_role["scope"], + "team_id": str(current_role["team_id"]) if current_role.get("team_id") else None, + "is_active": False, + "created_at": current_role["created_at"], + "created_by": str(current_role["created_by"]), + } + + dual_write_success = dual_write_service.update_document( + collection_name="user_roles", data=user_role_data, mongo_id=str(current_role["_id"]) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user role removal {current_role['_id']} to Postgres") + + return result.modified_count > 0 diff --git a/todo/repositories/user_team_details_repository.py b/todo/repositories/user_team_details_repository.py new file mode 100644 index 00000000..e44c2935 --- /dev/null +++ b/todo/repositories/user_team_details_repository.py @@ -0,0 +1,68 @@ +from bson import ObjectId +from todo.repositories.common.mongo_repository import MongoRepository +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +class UserTeamDetailsRepository(MongoRepository): + collection_name = "user_team_details" + + @classmethod + def get_by_user_and_team(cls, user_id: str, team_id: str): + collection = cls.get_collection() + try: + user_id_obj = ObjectId(user_id) + except Exception: + user_id_obj = user_id + try: + team_id_obj = ObjectId(team_id) + except Exception: + team_id_obj = team_id + + queries = [ + {"user_id": user_id_obj, "team_id": team_id_obj}, + {"user_id": user_id, "team_id": team_id_obj}, + {"user_id": user_id_obj, "team_id": team_id}, + {"user_id": user_id, "team_id": team_id}, + ] + + for query in queries: + result = collection.find_one(query) + if result: + return result + return None + + @classmethod + def remove_member_from_team(cls, user_id: str, team_id: str) -> bool: + collection = cls.get_collection() + try: + user_id_obj = ObjectId(user_id) + except Exception: + user_id_obj = user_id + try: + team_id_obj = ObjectId(team_id) + except Exception: + team_id_obj = team_id + queries = [ + {"user_id": user_id_obj, "team_id": team_id_obj}, + {"user_id": user_id, "team_id": team_id_obj}, + {"user_id": user_id_obj, "team_id": team_id}, + {"user_id": user_id, "team_id": team_id}, + ] + for query in queries: + document = collection.find_one(query) + if document: + result = collection.delete_one(query) + if result.deleted_count > 0: + dual_write_service = EnhancedDualWriteService() + dual_write_success = dual_write_service.delete_document( + collection_name="user_team_details", mongo_id=str(document["_id"]) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync user team details deletion {document['_id']} to Postgres") + + return True + return False diff --git a/todo/repositories/watchlist_repository.py b/todo/repositories/watchlist_repository.py new file mode 100644 index 00000000..36625216 --- /dev/null +++ b/todo/repositories/watchlist_repository.py @@ -0,0 +1,372 @@ +from datetime import datetime, timezone +from typing import List, Tuple +from typing import Optional + +from todo.repositories.common.mongo_repository import MongoRepository +from todo.models.watchlist import WatchlistModel +from todo.dto.watchlist_dto import WatchlistDTO +from bson import ObjectId +from todo.services.enhanced_dual_write_service import EnhancedDualWriteService + + +def _convert_objectids_to_str(obj): + """Recursively convert all ObjectId values in a dict/list to strings.""" + if isinstance(obj, dict): + return {k: _convert_objectids_to_str(v) for k, v in obj.items()} + elif isinstance(obj, list): + return [_convert_objectids_to_str(item) for item in obj] + elif isinstance(obj, ObjectId): + return str(obj) + else: + return obj + + +class WatchlistRepository(MongoRepository): + collection_name = WatchlistModel.collection_name + + @classmethod + def get_by_user_and_task(cls, user_id: str, task_id: str) -> Optional[WatchlistModel]: + doc = cls.get_collection().find_one({"userId": user_id, "taskId": task_id}) + if doc: + # Convert ObjectId fields to strings for the model + if "updatedBy" in doc and doc["updatedBy"]: + doc["updatedBy"] = str(doc["updatedBy"]) + return WatchlistModel(**doc) + return None + + @classmethod + def create(cls, watchlist_model: WatchlistModel) -> WatchlistModel: + doc = watchlist_model.model_dump(by_alias=True) + doc.pop("_id", None) + insert_result = cls.get_collection().insert_one(doc) + watchlist_model.id = str(insert_result.inserted_id) + + dual_write_service = EnhancedDualWriteService() + watchlist_data = { + "task_id": str(watchlist_model.taskId), + "user_id": str(watchlist_model.userId), + "is_active": watchlist_model.isActive, + "created_by": str(watchlist_model.createdBy), + "created_at": watchlist_model.createdAt, + "updated_by": str(watchlist_model.updatedBy) if watchlist_model.updatedBy else None, + "updated_at": watchlist_model.updatedAt, + } + + dual_write_success = dual_write_service.create_document( + collection_name="watchlists", data=watchlist_data, mongo_id=str(watchlist_model.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync watchlist {watchlist_model.id} to Postgres") + + return watchlist_model + + @classmethod + def get_watchlisted_tasks(cls, page, limit, user_id) -> Tuple[int, List[WatchlistDTO]]: + """ + Get paginated list of watchlisted tasks with assignee details. + The assignee represents who the task belongs to (who is responsible for completing the task). + """ + watchlist_collection = cls.get_collection() + + query = {"userId": user_id, "isActive": True} + + zero_indexed_page = page - 1 + skip = zero_indexed_page * limit + + pipeline = [ + {"$match": query}, + { + "$facet": { + "data": [ + { + "$lookup": { + "from": "tasks", + "let": {"taskIdStr": "$taskId"}, + "pipeline": [{"$match": {"$expr": {"$eq": ["$_id", {"$toObjectId": "$$taskIdStr"}]}}}], + "as": "task", + } + }, + {"$unwind": "$task"}, + { + "$addFields": { + "lastAdded": {"$ifNull": [{"$toDate": "$updatedAt"}, {"$toDate": "$createdAt"}]}, + "lastActivity": { + "$ifNull": [{"$toDate": "$task.updatedAt"}, {"$toDate": "$task.createdAt"}] + }, + } + }, + { + "$addFields": { + "lastEvent": { + "$cond": { + "if": {"$gt": ["$lastAdded", "$lastActivity"]}, + "then": "$lastAdded", + "else": "$lastActivity", + } + } + } + }, + {"$sort": {"lastEvent": -1}}, + { + "$lookup": { + "from": "users", + "let": {"createdById": "$task.createdBy"}, + "pipeline": [ + {"$match": {"$expr": {"$eq": ["$_id", {"$toObjectId": "$$createdById"}]}}} + ], + "as": "created_by_user", + } + }, + { + "$lookup": { + "from": "task_details", + "let": {"taskIdStr": "$taskId"}, + "pipeline": [ + { + "$match": { + "$expr": { + "$and": [ + {"$eq": ["$task_id", {"$toObjectId": "$$taskIdStr"}]}, + {"$eq": ["$is_active", True]}, + ] + } + } + } + ], + "as": "assignment", + } + }, + { + "$lookup": { + "from": "users", + "let": {"assigneeId": {"$arrayElemAt": ["$assignment.assignee_id", 0]}}, + "pipeline": [ + { + "$match": { + "$expr": { + "$and": [ + {"$eq": ["$_id", "$$assigneeId"]}, + {"$eq": [{"$arrayElemAt": ["$assignment.user_type", 0]}, "user"]}, + ] + } + } + } + ], + "as": "assignee_user", + } + }, + { + "$lookup": { + "from": "teams", + "let": {"assigneeId": {"$arrayElemAt": ["$assignment.assignee_id", 0]}}, + "pipeline": [ + { + "$match": { + "$expr": { + "$and": [ + {"$eq": ["$_id", "$$assigneeId"]}, + {"$eq": [{"$arrayElemAt": ["$assignment.user_type", 0]}, "team"]}, + ] + } + } + } + ], + "as": "assignee_team", + } + }, + { + "$replaceRoot": { + "newRoot": { + "$mergeObjects": [ + "$task", + { + "watchlistId": {"$toString": "$_id"}, + "taskId": {"$toString": "$task._id"}, + "deferredDetails": "$task.deferredDetails", + "createdBy": { + "id": {"$toString": {"$arrayElemAt": ["$created_by_user._id", 0]}}, + "name": {"$arrayElemAt": ["$created_by_user.name", 0]}, + "addedOn": {"$arrayElemAt": ["$created_by_user.addedOn", 0]}, + "tasksAssignedCount": { + "$arrayElemAt": ["$created_by_user.tasksAssignedCount", 0] + }, + }, + "assignee": { + "$cond": { + "if": {"$gt": [{"$size": "$assignee_user"}, 0]}, + "then": { + "assignee_id": { + "$toString": {"$arrayElemAt": ["$assignee_user._id", 0]} + }, + "assignee_name": {"$arrayElemAt": ["$assignee_user.name", 0]}, + "user_type": "user", + }, + "else": { + "$cond": { + "if": {"$gt": [{"$size": "$assignee_team"}, 0]}, + "then": { + "assignee_id": { + "$toString": { + "$arrayElemAt": ["$assignee_team._id", 0] + } + }, + "assignee_name": { + "$arrayElemAt": ["$assignee_team.name", 0] + }, + "user_type": "team", + }, + "else": None, + } + }, + } + }, + }, + ] + } + } + }, + {"$skip": skip}, + {"$limit": limit}, + ], + "total": [{"$count": "value"}], + } + }, + {"$addFields": {"total": {"$ifNull": [{"$arrayElemAt": ["$total.value", 0]}, 0]}}}, + ] + + aggregation_result = watchlist_collection.aggregate(pipeline) + result = next(aggregation_result, {"total": 0, "data": []}) + count = result.get("total", 0) + + tasks = [_convert_objectids_to_str(doc) for doc in result.get("data", [])] + + # If assignee is null, try to fetch it separately + for task in tasks: + if not task.get("assignee"): + task["assignee"] = cls._get_assignee_for_task(task.get("taskId")) + + # If createdBy is null or still an ID, try to fetch user details separately + if not task.get("createdBy") or ( + isinstance(task.get("createdBy"), str) and ObjectId.is_valid(task.get("createdBy", "")) + ): + task["createdBy"] = cls._get_user_dto_for_id(task.get("createdBy")) + + tasks = [WatchlistDTO(**doc) for doc in tasks] + + return count, tasks + + @classmethod + def _get_assignee_for_task(cls, task_id: str): + """ + Fallback method to get assignee details for a task. + """ + if not task_id: + return None + + try: + from todo.repositories.task_assignment_repository import TaskAssignmentRepository + from todo.repositories.user_repository import UserRepository + from todo.repositories.team_repository import TeamRepository + + # Get task assignment + assignment = TaskAssignmentRepository.get_by_task_id(task_id) + if not assignment: + return None + + assignee_id = str(assignment.assignee_id) + user_type = assignment.user_type + + if user_type == "user": + # Get user details + user = UserRepository.get_by_id(assignee_id) + if user: + return {"assignee_id": assignee_id, "assignee_name": user.name, "user_type": "user"} + elif user_type == "team": + # Get team details + team = TeamRepository.get_by_id(assignee_id) + if team: + return {"assignee_id": assignee_id, "assignee_name": team.name, "user_type": "team"} + + except Exception: + # If any error occurs, return None + return None + + return None + + @classmethod + def _get_user_dto_for_id(cls, user_id: str): + """ + Fallback method to get user details for createdBy field. + """ + if not user_id: + return None + + try: + from todo.repositories.user_repository import UserRepository + + # Get user details + user = UserRepository.get_by_id(user_id) + if user: + return { + "id": str(user.id), + "name": user.name, + "addedOn": getattr(user, "addedOn", None), + "tasksAssignedCount": getattr(user, "tasksAssignedCount", None), + } + except Exception: + # If any error occurs, return None + pass + + return None + + @classmethod + def update(cls, taskId: ObjectId, isActive: bool, userId: ObjectId) -> dict: + """ + Update the watchlist status of a task. + """ + watchlist_collection = cls.get_collection() + + current_watchlist = cls.get_by_user_and_task(str(userId), str(taskId)) + if not current_watchlist: + return None + + update_result = watchlist_collection.update_one( + {"userId": str(userId), "taskId": str(taskId)}, + { + "$set": { + "isActive": isActive, + "updatedAt": datetime.now(timezone.utc), + "updatedBy": userId, + } + }, + ) + + if update_result.modified_count > 0: + dual_write_service = EnhancedDualWriteService() + watchlist_data = { + "task_id": str(current_watchlist.taskId), + "user_id": str(current_watchlist.userId), + "is_active": isActive, + "created_by": str(current_watchlist.createdBy), + "created_at": current_watchlist.createdAt, + "updated_by": str(userId), + "updated_at": datetime.now(timezone.utc), + } + + dual_write_success = dual_write_service.update_document( + collection_name="watchlists", data=watchlist_data, mongo_id=str(current_watchlist.id) + ) + + if not dual_write_success: + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to sync watchlist update {current_watchlist.id} to Postgres") + + if update_result.modified_count == 0: + return None + return update_result diff --git a/todo/serializers/add_team_member_serializer.py b/todo/serializers/add_team_member_serializer.py new file mode 100644 index 00000000..5c17b2bb --- /dev/null +++ b/todo/serializers/add_team_member_serializer.py @@ -0,0 +1,7 @@ +from rest_framework import serializers + + +class AddTeamMemberSerializer(serializers.Serializer): + member_ids = serializers.ListField( + child=serializers.CharField(), min_length=1, help_text="List of user IDs to add to the team" + ) diff --git a/todo/serializers/create_task_assignment_serializer.py b/todo/serializers/create_task_assignment_serializer.py new file mode 100644 index 00000000..a7450387 --- /dev/null +++ b/todo/serializers/create_task_assignment_serializer.py @@ -0,0 +1,30 @@ +from rest_framework import serializers +from bson import ObjectId +from todo.constants.messages import ValidationErrors + + +class CreateTaskAssignmentSerializer(serializers.Serializer): + task_id = serializers.CharField(required=True) + assignee_id = serializers.CharField(required=True) + user_type = serializers.ChoiceField( + required=True, choices=["user", "team"], help_text="Type of assignee: 'user' or 'team'" + ) + + def validate_task_id(self, value): + if not ObjectId.is_valid(value): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(value)) + return value + + def validate_assignee_id(self, value): + if not ObjectId.is_valid(value): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(value)) + return value + + def validate_user_type(self, value): + if value not in ["user", "team"]: + raise serializers.ValidationError("user_type must be either 'user' or 'team'") + return value + + +class AssignTaskToUserSerializer(serializers.Serializer): + assignee_id = serializers.CharField(help_text="User ID to assign the task to") diff --git a/todo/serializers/create_task_serializer.py b/todo/serializers/create_task_serializer.py index 67025371..a4a6e193 100644 --- a/todo/serializers/create_task_serializer.py +++ b/todo/serializers/create_task_serializer.py @@ -1,30 +1,47 @@ from rest_framework import serializers from bson import ObjectId -from datetime import datetime, timezone +from datetime import datetime from todo.constants.task import TaskPriority, TaskStatus from todo.constants.messages import ValidationErrors +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError class CreateTaskSerializer(serializers.Serializer): - title = serializers.CharField(required=True, allow_blank=False) - description = serializers.CharField(required=False, allow_blank=True, allow_null=True) + title = serializers.CharField(required=True, allow_blank=False, help_text="Title of the task") + description = serializers.CharField( + required=False, allow_blank=True, allow_null=True, help_text="Description of the task" + ) priority = serializers.ChoiceField( required=False, choices=[priority.name for priority in TaskPriority], default=TaskPriority.LOW.name, + help_text="Priority of the task (LOW, MEDIUM, HIGH)", ) status = serializers.ChoiceField( required=False, choices=[status.name for status in TaskStatus], default=TaskStatus.TODO.name, + help_text="Status of the task (TODO, IN_PROGRESS, DONE)", + ) + # Accept assignee_id and user_type at the top level + assignee_id = serializers.CharField( + required=False, allow_null=True, help_text="User or team ID to assign the task to" + ) + user_type = serializers.ChoiceField( + required=False, choices=["user", "team"], allow_null=True, help_text="Type of assignee: 'user' or 'team'" ) - assignee = serializers.CharField(required=False, allow_blank=True, allow_null=True) labels = serializers.ListField( child=serializers.CharField(), required=False, default=list, + help_text="List of label IDs", + ) + timezone = serializers.CharField( + required=True, allow_null=False, help_text="IANA timezone string like 'Asia/Kolkata'" + ) + dueAt = serializers.DateTimeField( + required=False, allow_null=True, help_text="Due date and time in ISO format (UTC)" ) - dueAt = serializers.DateTimeField(required=False, allow_null=True) def validate_title(self, value): if not value.strip(): @@ -37,14 +54,34 @@ def validate_labels(self, value): raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(label_id)) return value - def validate_dueAt(self, value): - if value is not None: - now = datetime.now(timezone.utc) - if value <= now: - raise serializers.ValidationError(ValidationErrors.PAST_DUE_DATE) - return value + def validate(self, data): + # Compose the 'assignee' dict if assignee_id and user_type are present + assignee_id = data.pop("assignee_id", None) + user_type = data.pop("user_type", None) + if assignee_id and user_type: + if not ObjectId.is_valid(assignee_id): + raise serializers.ValidationError( + {"assignee_id": ValidationErrors.INVALID_OBJECT_ID.format(assignee_id)} + ) + if user_type not in ["user", "team"]: + raise serializers.ValidationError({"user_type": "user_type must be either 'user' or 'team'"}) + data["assignee"] = {"assignee_id": assignee_id, "user_type": user_type} - def validate_assignee(self, value): - if isinstance(value, str) and not value.strip(): - return None - return value + due_at = data.get("dueAt") + timezone_str = data.get("timezone") + + if due_at: + if not timezone_str: + raise serializers.ValidationError({"timezone": ValidationErrors.REQUIRED_TIMEZONE}) + try: + tz = ZoneInfo(timezone_str) + except ZoneInfoNotFoundError: + raise serializers.ValidationError({"timezone": ValidationErrors.INVALID_TIMEZONE}) + + now_date = datetime.now(tz).date() + value_date = due_at.astimezone(tz).date() + + if value_date < now_date: + raise serializers.ValidationError({"dueAt": ValidationErrors.PAST_DUE_DATE}) + + return data diff --git a/todo/serializers/create_team_serializer.py b/todo/serializers/create_team_serializer.py new file mode 100644 index 00000000..f01ab322 --- /dev/null +++ b/todo/serializers/create_team_serializer.py @@ -0,0 +1,33 @@ +from bson import ObjectId +from rest_framework import serializers + +from todo.constants.messages import ValidationErrors + + +class CreateTeamSerializer(serializers.Serializer): + """ + The poc_id represents the team's point of contact and is optional. + """ + + name = serializers.CharField(max_length=100) + description = serializers.CharField(max_length=500, required=False, allow_blank=True) + member_ids = serializers.ListField(child=serializers.CharField(), required=False, default=list) + poc_id = serializers.CharField(required=False, allow_null=True, allow_blank=True) + team_invite_code = serializers.CharField(max_length=20, min_length=6) + + def validate_poc_id(self, value): + if not value or not value.strip(): + return None + if not ObjectId.is_valid(value): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(value)) + return value + + def validate_member_ids(self, value): + for member_id in value: + if not ObjectId.is_valid(member_id): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(member_id)) + return value + + +class JoinTeamByInviteCodeSerializer(serializers.Serializer): + invite_code = serializers.CharField(max_length=100) diff --git a/todo/serializers/create_watchlist_serializer.py b/todo/serializers/create_watchlist_serializer.py new file mode 100644 index 00000000..c59a6983 --- /dev/null +++ b/todo/serializers/create_watchlist_serializer.py @@ -0,0 +1,15 @@ +from rest_framework import serializers +from bson import ObjectId + +from todo.constants.messages import ValidationErrors + + +class CreateWatchlistSerializer(serializers.Serializer): + taskId = serializers.CharField(required=True) + + def validate_taskId(self, value): + try: + ObjectId(str(value)) + except Exception: + raise serializers.ValidationError(ValidationErrors.INVALID_TASK_ID_FORMAT) + return value diff --git a/todo/serializers/defer_task_serializer.py b/todo/serializers/defer_task_serializer.py new file mode 100644 index 00000000..20567676 --- /dev/null +++ b/todo/serializers/defer_task_serializer.py @@ -0,0 +1,12 @@ +from rest_framework import serializers +from datetime import datetime, timezone +from todo.constants.messages import ValidationErrors + + +class DeferTaskSerializer(serializers.Serializer): + deferredTill = serializers.DateTimeField() + + def validate_deferredTill(self, value): + if value < datetime.now(timezone.utc): + raise serializers.ValidationError(ValidationErrors.PAST_DEFERRED_TILL_DATE) + return value diff --git a/todo/serializers/get_labels_serializer.py b/todo/serializers/get_labels_serializer.py new file mode 100644 index 00000000..b294f15e --- /dev/null +++ b/todo/serializers/get_labels_serializer.py @@ -0,0 +1,35 @@ +from rest_framework import serializers +from django.conf import settings + +from todo.constants.messages import ValidationErrors + + +class GetLabelQueryParamsSerializer(serializers.Serializer): + page = serializers.IntegerField( + required=False, + default=1, + min_value=1, + error_messages={ + "min_value": ValidationErrors.PAGE_POSITIVE, + }, + ) + limit = serializers.IntegerField( + required=False, + default=10, + min_value=1, + max_value=settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"], + error_messages={ + "min_value": ValidationErrors.LIMIT_POSITIVE, + }, + ) + search = serializers.CharField( + required=False, + default="", + allow_blank=True, + error_messages={ + "invalid": ValidationErrors.INVALID_SEARCH_QUERY_TYPE, + }, + ) + + def validate_search(self, value: str) -> str: + return value.strip() if value else "" diff --git a/todo/serializers/get_roles_serializer.py b/todo/serializers/get_roles_serializer.py new file mode 100644 index 00000000..b0c61721 --- /dev/null +++ b/todo/serializers/get_roles_serializer.py @@ -0,0 +1,8 @@ +from rest_framework import serializers +from todo.constants.role import ROLE_SCOPE_CHOICES + + +class RoleQuerySerializer(serializers.Serializer): + is_active = serializers.BooleanField(required=False, default=None, allow_null=True) + name = serializers.CharField(required=False, max_length=100) + scope = serializers.ChoiceField(choices=ROLE_SCOPE_CHOICES, required=False) diff --git a/todo/serializers/get_tasks_serializer.py b/todo/serializers/get_tasks_serializer.py index 1e4ac2dc..16f28f7e 100644 --- a/todo/serializers/get_tasks_serializer.py +++ b/todo/serializers/get_tasks_serializer.py @@ -1,6 +1,15 @@ from rest_framework import serializers from django.conf import settings +from todo.constants.task import SORT_FIELDS, SORT_ORDERS, SORT_FIELD_UPDATED_AT, SORT_FIELD_DEFAULT_ORDERS, TaskStatus + + +class CaseInsensitiveChoiceField(serializers.ChoiceField): + def to_internal_value(self, data): + if isinstance(data, str): + data = data.upper() + return super().to_internal_value(data) + class GetTaskQueryParamsSerializer(serializers.Serializer): page = serializers.IntegerField( @@ -20,3 +29,32 @@ class GetTaskQueryParamsSerializer(serializers.Serializer): "min_value": "limit must be greater than or equal to 1", }, ) + + profile = serializers.BooleanField(required=False, error_messages={"invalid": "profile must be a boolean value."}) + + sort_by = serializers.ChoiceField( + choices=SORT_FIELDS, + required=False, + default=SORT_FIELD_UPDATED_AT, + ) + order = serializers.ChoiceField( + choices=SORT_ORDERS, + required=False, + ) + + teamId = serializers.CharField(required=False, allow_blank=False, allow_null=True) + + status = CaseInsensitiveChoiceField( + choices=[status.value for status in TaskStatus], + required=False, + allow_null=True, + ) + + def validate(self, attrs): + validated_data = super().validate(attrs) + + if "order" not in validated_data or validated_data["order"] is None: + sort_by = validated_data.get("sort_by", SORT_FIELD_UPDATED_AT) + validated_data["order"] = SORT_FIELD_DEFAULT_ORDERS[sort_by] + + return validated_data diff --git a/todo/serializers/get_watchlist_tasks_serializer.py b/todo/serializers/get_watchlist_tasks_serializer.py new file mode 100644 index 00000000..2ec2d598 --- /dev/null +++ b/todo/serializers/get_watchlist_tasks_serializer.py @@ -0,0 +1,24 @@ +from rest_framework import serializers +from django.conf import settings + +from todo.constants.messages import ValidationErrors + + +class GetWatchlistTaskQueryParamsSerializer(serializers.Serializer): + page = serializers.IntegerField( + required=False, + default=1, + min_value=1, + error_messages={ + "min_value": ValidationErrors.PAGE_POSITIVE, + }, + ) + limit = serializers.IntegerField( + required=False, + default=settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["DEFAULT_PAGE_LIMIT"], + min_value=1, + max_value=settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"], + error_messages={ + "min_value": ValidationErrors.LIMIT_POSITIVE, + }, + ) diff --git a/todo/serializers/team_creation_invite_code_serializer.py b/todo/serializers/team_creation_invite_code_serializer.py new file mode 100644 index 00000000..1f2947c4 --- /dev/null +++ b/todo/serializers/team_creation_invite_code_serializer.py @@ -0,0 +1,18 @@ +from rest_framework import serializers + + +class GenerateTeamCreationInviteCodeSerializer(serializers.Serializer): + """Serializer for generating team creation invite codes.""" + + description = serializers.CharField( + max_length=500, + required=False, + allow_blank=True, + help_text="Optional description for the team creation invite code (e.g., 'Code for marketing team')", + ) + + +class VerifyTeamCreationInviteCodeSerializer(serializers.Serializer): + """Serializer for verifying team creation invite codes.""" + + code = serializers.CharField(max_length=100) diff --git a/todo/serializers/update_task_serializer.py b/todo/serializers/update_task_serializer.py new file mode 100644 index 00000000..b49b96f3 --- /dev/null +++ b/todo/serializers/update_task_serializer.py @@ -0,0 +1,102 @@ +from rest_framework import serializers +from bson import ObjectId +from datetime import datetime, timezone + +from todo.constants.task import TaskPriority, TaskStatus +from todo.constants.messages import ValidationErrors +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError + + +class UpdateTaskSerializer(serializers.Serializer): + title = serializers.CharField(required=False, allow_blank=True, max_length=255) + description = serializers.CharField(required=False, allow_blank=True, allow_null=True) + priority = serializers.ChoiceField( + required=False, + choices=[priority.name for priority in TaskPriority], + allow_null=True, + ) + status = serializers.ChoiceField( + required=False, + choices=[status.name for status in TaskStatus], + allow_null=True, + ) + assignee = serializers.DictField(required=False, allow_null=True) + labels = serializers.ListField( + child=serializers.CharField(), + required=False, + allow_null=True, + ) + timezone = serializers.CharField( + required=False, allow_null=True, help_text="IANA timezone string like 'Asia/Kolkata'" + ) + dueAt = serializers.DateTimeField(required=False, allow_null=True) + startedAt = serializers.DateTimeField(required=False, allow_null=True) + isAcknowledged = serializers.BooleanField(required=False) + + def validate_title(self, value): + if value is not None and not value.strip(): + raise serializers.ValidationError(ValidationErrors.BLANK_TITLE) + return value + + def validate_labels(self, value): + if value is None: + return value + + if not isinstance(value, (list, tuple)): + raise serializers.ValidationError(ValidationErrors.INVALID_LABELS_STRUCTURE) + + invalid_ids = [label_id for label_id in value if not ObjectId.is_valid(label_id)] + if invalid_ids: + raise serializers.ValidationError( + [ValidationErrors.INVALID_OBJECT_ID.format(label_id) for label_id in invalid_ids] + ) + + return value + + def validate_startedAt(self, value): + if value and value > datetime.now(timezone.utc): + raise serializers.ValidationError(ValidationErrors.FUTURE_STARTED_AT) + return value + + def validate_assignee(self, value): + if not value: + return None + + if not isinstance(value, dict): + raise serializers.ValidationError("Assignee must be a dictionary") + + assignee_id = value.get("assignee_id") + user_type = value.get("user_type") + + if not assignee_id: + raise serializers.ValidationError("assignee_id is required") + + if not user_type or user_type not in ["team", "user"]: + raise serializers.ValidationError("user_type must be either 'team' or 'user'") + + if not ObjectId.is_valid(assignee_id): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(assignee_id)) + + return value + + def validate(self, data): + due_at = data.get("dueAt") + timezone_str = data.get("timezone") + errors = {} + if due_at is not None: + if not timezone_str: + errors["timezone"] = [ValidationErrors.REQUIRED_TIMEZONE] + else: + try: + tz = ZoneInfo(timezone_str) + + now_date = datetime.now(tz).date() + value_date = due_at.astimezone(tz).date() + + if value_date < now_date: + errors["dueAt"] = [ValidationErrors.PAST_DUE_DATE] + except ZoneInfoNotFoundError: + errors["timezone"] = [ValidationErrors.INVALID_TIMEZONE] + if errors: + raise serializers.ValidationError(errors) + return data diff --git a/todo/serializers/update_team_serializer.py b/todo/serializers/update_team_serializer.py new file mode 100644 index 00000000..0c6a0deb --- /dev/null +++ b/todo/serializers/update_team_serializer.py @@ -0,0 +1,41 @@ +from rest_framework import serializers +from bson import ObjectId + +from todo.constants.messages import ValidationErrors + + +class UpdateTeamSerializer(serializers.Serializer): + """ + Serializer for updating team details. + All fields are optional for PATCH operations. + """ + + name = serializers.CharField(max_length=100, required=False, allow_blank=False) + description = serializers.CharField(max_length=500, required=False, allow_blank=True, allow_null=True) + poc_id = serializers.CharField(required=False, allow_null=True, allow_blank=False) + member_ids = serializers.ListField(child=serializers.CharField(), required=False, allow_empty=True, default=None) + + def validate_name(self, value): + if value is not None and not value.strip(): + raise serializers.ValidationError("Team name cannot be blank") + return value.strip() if value else None + + def validate_description(self, value): + if value is not None: + return value.strip() + return value + + def validate_poc_id(self, value): + if not value or not value.strip(): + return None + if not ObjectId.is_valid(value): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(value)) + return value + + def validate_member_ids(self, value): + if value is None: + return value + for member_id in value: + if not ObjectId.is_valid(member_id): + raise serializers.ValidationError(ValidationErrors.INVALID_OBJECT_ID.format(member_id)) + return value diff --git a/todo/serializers/update_watchlist_serializer.py b/todo/serializers/update_watchlist_serializer.py new file mode 100644 index 00000000..e1a39868 --- /dev/null +++ b/todo/serializers/update_watchlist_serializer.py @@ -0,0 +1,5 @@ +from rest_framework import serializers + + +class UpdateWatchlistSerializer(serializers.Serializer): + isActive = serializers.BooleanField(required=True) diff --git a/todo/services/dual_write_service.py b/todo/services/dual_write_service.py new file mode 100644 index 00000000..a1c1dbc0 --- /dev/null +++ b/todo/services/dual_write_service.py @@ -0,0 +1,500 @@ +import logging +from typing import Any, Dict, List +from django.db import transaction +from django.utils import timezone + +from todo.models.postgres import ( + PostgresUser, + PostgresTask, + PostgresTaskLabel, + PostgresTeam, + PostgresUserTeamDetails, + PostgresLabel, + PostgresRole, + PostgresTaskAssignment, + PostgresWatchlist, + PostgresUserRole, + PostgresAuditLog, + PostgresTeamCreationInviteCode, +) + +logger = logging.getLogger(__name__) + + +class DualWriteService: + """ + Service for dual-write operations to MongoDB and Postgres. + Ensures data consistency across both databases. + """ + + # Mapping of MongoDB collection names to Postgres models + COLLECTION_MODEL_MAP = { + "users": PostgresUser, + "tasks": PostgresTask, + "teams": PostgresTeam, + "labels": PostgresLabel, + "roles": PostgresRole, + "task_assignments": PostgresTaskAssignment, + "watchlists": PostgresWatchlist, + "user_team_details": PostgresUserTeamDetails, + "user_roles": PostgresUserRole, + "audit_logs": PostgresAuditLog, + "team_creation_invite_codes": PostgresTeamCreationInviteCode, + } + + def __init__(self): + self.sync_failures = [] + + def create_document(self, collection_name: str, data: Dict[str, Any], mongo_id: str) -> bool: + """ + Create a document in both MongoDB and Postgres. + + Args: + collection_name: Name of the MongoDB collection + data: Document data + mongo_id: MongoDB ObjectId as string + + Returns: + bool: True if both writes succeeded, False otherwise + """ + try: + # First, write to MongoDB (this should already be done by the calling code) + # Then, write to Postgres + postgres_model = self._get_postgres_model(collection_name) + if not postgres_model: + logger.error(f"No Postgres model found for collection: {collection_name}") + return False + + # Transform data for Postgres + postgres_data = self._transform_data_for_postgres(collection_name, data, mongo_id) + + # Write to Postgres + with transaction.atomic(): + # Extract labels before creating the task + labels = postgres_data.pop("labels", []) if collection_name == "tasks" else [] + + postgres_instance = postgres_model.objects.create(**postgres_data) + + # Handle labels for tasks + if collection_name == "tasks" and labels: + self._sync_task_labels(postgres_instance, labels) + + logger.info(f"Successfully synced {collection_name}:{mongo_id} to Postgres") + return True + + except Exception as e: + error_msg = f"Failed to sync {collection_name}:{mongo_id} to Postgres: {str(e)}" + logger.error(error_msg) + self._record_sync_failure(collection_name, mongo_id, error_msg) + return False + + def update_document(self, collection_name: str, mongo_id: str, data: Dict[str, Any]) -> bool: + """ + Update a document in both MongoDB and Postgres. + + Args: + collection_name: Name of the MongoDB collection + mongo_id: MongoDB ObjectId as string + data: Updated document data + + Returns: + bool: True if both updates succeeded, False otherwise + """ + try: + postgres_model = self._get_postgres_model(collection_name) + if not postgres_model: + logger.error(f"No Postgres model found for collection: {collection_name}") + return False + + # Transform data for Postgres + postgres_data = self._transform_data_for_postgres(collection_name, data, mongo_id) + + # Update in Postgres + with transaction.atomic(): + # Extract labels before updating the task + labels = postgres_data.pop("labels", []) if collection_name == "tasks" else [] + + postgres_instance = postgres_model.objects.get(mongo_id=mongo_id) + preserve_fields = {"created_at", "mongo_id"} + + for field, value in postgres_data.items(): + if hasattr(postgres_instance, field) and field not in preserve_fields: + setattr(postgres_instance, field, value) + + postgres_instance.sync_status = "SYNCED" + postgres_instance.sync_error = None + postgres_instance.save() + + # Handle labels for tasks + if collection_name == "tasks": + self._sync_task_labels(postgres_instance, labels) + + logger.info(f"Successfully updated {collection_name}:{mongo_id} in Postgres") + return True + + except postgres_model.DoesNotExist: + # Document doesn't exist in Postgres, create it + return self.create_document(collection_name, data, mongo_id) + except Exception as e: + error_msg = f"Failed to update {collection_name}:{mongo_id} in Postgres: {str(e)}" + logger.error(error_msg) + self._record_sync_failure(collection_name, mongo_id, error_msg) + return False + + def delete_document(self, collection_name: str, mongo_id: str) -> bool: + """ + Delete a document from both MongoDB and Postgres. + + Args: + collection_name: Name of the MongoDB collection + mongo_id: MongoDB ObjectId as string + + Returns: + bool: True if both deletes succeeded, False otherwise + """ + try: + postgres_model = self._get_postgres_model(collection_name) + if not postgres_model: + logger.error(f"No Postgres model found for collection: {collection_name}") + return False + + # Soft delete in Postgres (mark as deleted) + with transaction.atomic(): + postgres_instance = postgres_model.objects.get(mongo_id=mongo_id) + if hasattr(postgres_instance, "is_deleted"): + postgres_instance.is_deleted = True + postgres_instance.sync_status = "SYNCED" + postgres_instance.sync_error = None + postgres_instance.save() + else: + # If no soft delete field, actually delete the record + postgres_instance.delete() + + logger.info(f"Successfully deleted {collection_name}:{mongo_id} from Postgres") + return True + + except postgres_model.DoesNotExist: + logger.warning(f"Document {collection_name}:{mongo_id} not found in Postgres for deletion") + return True # Consider this a success since the goal is achieved + except Exception as e: + error_msg = f"Failed to delete {collection_name}:{mongo_id} from Postgres: {str(e)}" + logger.error(error_msg) + self._record_sync_failure(collection_name, mongo_id, error_msg) + return False + + def _get_postgres_model(self, collection_name: str): + """Get the corresponding Postgres model for a MongoDB collection.""" + return self.COLLECTION_MODEL_MAP.get(collection_name) + + def _transform_data_for_postgres(self, collection_name: str, data: Dict[str, Any], mongo_id: str) -> Dict[str, Any]: + """ + Transform MongoDB document data to Postgres model format. + + Args: + collection_name: Name of the MongoDB collection + data: MongoDB document data + mongo_id: MongoDB ObjectId as string + + Returns: + Dict: Transformed data for Postgres + """ + # Start with basic sync metadata + postgres_data = { + "mongo_id": mongo_id, + "sync_status": "SYNCED", + "sync_error": None, + } + + # Handle special cases for different collections + if collection_name == "tasks": + postgres_data.update(self._transform_task_data(data)) + elif collection_name == "teams": + postgres_data.update(self._transform_team_data(data)) + elif collection_name == "users": + postgres_data.update(self._transform_user_data(data)) + elif collection_name == "labels": + postgres_data.update(self._transform_label_data(data)) + elif collection_name == "roles": + postgres_data.update(self._transform_role_data(data)) + elif collection_name == "task_assignments": + postgres_data.update(self._transform_task_assignment_data(data)) + elif collection_name == "watchlists": + postgres_data.update(self._transform_watchlist_data(data)) + elif collection_name == "user_team_details": + postgres_data.update(self._transform_user_team_details_data(data)) + elif collection_name == "user_roles": + postgres_data.update(self._transform_user_role_data(data)) + elif collection_name == "audit_logs": + postgres_data.update(self._transform_audit_log_data(data)) + elif collection_name == "team_creation_invite_codes": + postgres_data.update(self._transform_team_creation_invite_code_data(data)) + else: + # Generic transformation for unknown collections + postgres_data.update(self._transform_generic_data(data)) + + return postgres_data + + def _transform_task_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform task data for Postgres.""" + # Handle priority enum conversion + priority = data.get("priority", 3) + if hasattr(priority, "value"): # If it's an enum, get its value + priority = priority.value + + # Handle status enum conversion + status = data.get("status", "TODO") + if hasattr(status, "value"): # If it's an enum, get its value + status = status.value + + return { + "display_id": data.get("displayId"), + "title": data.get("title"), + "description": data.get("description"), + "priority": priority, # Store as integer like MongoDB + "status": status, # Store as string value like MongoDB + "is_acknowledged": data.get("isAcknowledged", False), + "is_deleted": data.get("isDeleted", False), + "started_at": data.get("startedAt"), + "due_at": data.get("dueAt"), + "created_at": data.get("createdAt"), + "updated_at": data.get("updatedAt"), + "created_by": str(data.get("createdBy", "")), + "updated_by": str(data.get("updatedBy", "")) if data.get("updatedBy") else None, + "labels": data.get("labels", []), # Include labels for processing + } + + def _transform_team_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform team data for Postgres.""" + return { + "name": data.get("name"), + "description": data.get("description"), + "invite_code": data.get("invite_code"), + "poc_id": str(data.get("poc_id", "")) if data.get("poc_id") else None, + "created_by": str(data.get("created_by", "")), + "updated_by": str(data.get("updated_by", "")), + "is_deleted": data.get("is_deleted", False), + "created_at": data.get("created_at"), + "updated_at": data.get("updated_at"), + } + + def _transform_user_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform user data for Postgres.""" + return { + "google_id": data.get("google_id"), + "email_id": data.get("email_id"), + "name": data.get("name"), + "picture": data.get("picture"), + "created_at": data.get("created_at"), + "updated_at": data.get("updated_at"), + } + + def _transform_label_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform label data for Postgres.""" + return { + "name": data.get("name"), + "color": data.get("color", "#000000"), + "description": data.get("description"), + "created_at": data.get("createdAt"), + "updated_at": data.get("updatedAt"), + } + + def _transform_role_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform role data for Postgres.""" + return { + "name": data.get("name"), + "description": data.get("description"), + "permissions": data.get("permissions", {}), + "created_at": data.get("created_at"), + "updated_at": data.get("updated_at"), + } + + def _transform_task_assignment_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform task assignment data for Postgres.""" + return { + "task_mongo_id": str(data.get("task_mongo_id", "")), + "assignee_id": str(data.get("assignee_id", "")), + "user_type": data.get("user_type", "user"), + "team_id": str(data.get("team_id", "")) if data.get("team_id") else None, + "is_active": data.get("is_active", True), + "created_at": data.get("created_at"), + "updated_at": data.get("updated_at"), + "created_by": str(data.get("created_by", "")), + "updated_by": str(data.get("updated_by", "")) if data.get("updated_by") else None, + } + + def _transform_watchlist_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform watchlist data for Postgres.""" + return { + "task_id": str(data.get("task_id", "")), + "user_id": str(data.get("user_id", "")), + "is_active": data.get("is_active", True), + "created_by": str(data.get("created_by", "")), + "updated_by": str(data.get("updated_by", "")) if data.get("updated_by") else None, + "created_at": data.get("created_at"), + "updated_at": data.get("updated_at"), + } + + def _sync_task_labels(self, postgres_task, labels: List[str]): + """ + Sync task labels to PostgresTaskLabel junction table. + + Args: + postgres_task: PostgresTask instance + labels: List of label MongoDB ObjectIds as strings + """ + try: + # Clear existing labels for this task + PostgresTaskLabel.objects.filter(task=postgres_task).delete() + + # Add new labels + for label_mongo_id in labels: + if label_mongo_id: # Skip empty labels + PostgresTaskLabel.objects.create(task=postgres_task, label_mongo_id=str(label_mongo_id)) + + logger.info(f"Successfully synced {len(labels)} labels for task {postgres_task.mongo_id}") + + except Exception as e: + logger.error(f"Failed to sync labels for task {postgres_task.mongo_id}: {str(e)}") + # Don't fail the entire operation, just log the error + + def _sync_task_assignment_update(self, task_mongo_id: str, new_assignment_data: Dict[str, Any]): + """ + Handle task assignment updates by deactivating old records and creating new ones. + This mirrors MongoDB's approach of soft deletes. + + Args: + task_mongo_id: MongoDB ObjectId of the task as string + new_assignment_data: Data for the new assignment + """ + try: + # Deactivate all existing assignments for this task + PostgresTaskAssignment.objects.filter(task_mongo_id=task_mongo_id).update( + status="REJECTED", # Mark as rejected instead of deleting + updated_at=timezone.now(), + ) + + # Create new assignment + PostgresTaskAssignment.objects.create( + mongo_id=new_assignment_data.get("mongo_id"), + task_mongo_id=new_assignment_data.get("task_mongo_id"), + user_mongo_id=new_assignment_data.get("user_mongo_id"), + team_mongo_id=new_assignment_data.get("team_mongo_id"), + status=new_assignment_data.get("status", "ASSIGNED"), + assigned_at=new_assignment_data.get("assigned_at"), + started_at=new_assignment_data.get("started_at"), + completed_at=new_assignment_data.get("completed_at"), + created_at=new_assignment_data.get("created_at"), + updated_at=new_assignment_data.get("updated_at"), + assigned_by=new_assignment_data.get("assigned_by"), + updated_by=new_assignment_data.get("updated_by"), + ) + + logger.info(f"Successfully synced task assignment update for task {task_mongo_id}") + + except Exception as e: + logger.error(f"Failed to sync task assignment update for task {task_mongo_id}: {str(e)}") + # Don't fail the entire operation, just log the error + + def _transform_user_team_details_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform user team details data for Postgres.""" + return { + "user_id": str(data.get("user_id", "")), + "team_id": str(data.get("team_id", "")), + "is_active": data.get("is_active", True), + "created_by": str(data.get("created_by", "")), + "updated_by": str(data.get("updated_by", "")), + "created_at": data.get("created_at"), + "updated_at": data.get("updated_at"), + } + + def _transform_user_role_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + return { + "user_id": str(data.get("user_id", "")), + "role_name": data.get("role_name"), + "scope": data.get("scope"), + "team_id": str(data.get("team_id", "")) if data.get("team_id") else None, + "is_active": data.get("is_active", True), + "created_at": data.get("created_at"), + "created_by": str(data.get("created_by", "")), + } + + def _transform_audit_log_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + return { + "task_id": str(data.get("task_id", "")) if data.get("task_id") else None, + "team_id": str(data.get("team_id", "")) if data.get("team_id") else None, + "previous_executor_id": str(data.get("previous_executor_id", "")) + if data.get("previous_executor_id") + else None, + "new_executor_id": str(data.get("new_executor_id", "")) if data.get("new_executor_id") else None, + "spoc_id": str(data.get("spoc_id", "")) if data.get("spoc_id") else None, + "action": data.get("action"), + "timestamp": data.get("timestamp"), + "status_from": data.get("status_from"), + "status_to": data.get("status_to"), + "assignee_from": str(data.get("assignee_from", "")) if data.get("assignee_from") else None, + "assignee_to": str(data.get("assignee_to", "")) if data.get("assignee_to") else None, + "performed_by": str(data.get("performed_by", "")) if data.get("performed_by") else None, + } + + def _transform_team_creation_invite_code_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform team creation invite code data for Postgres.""" + return { + "code": data.get("code"), + "description": data.get("description"), + "created_by": str(data.get("created_by", "")), + "used_by": str(data.get("used_by", "")) if data.get("used_by") else None, + "is_used": data.get("is_used", False), + "created_at": data.get("created_at"), + "used_at": data.get("used_at"), + } + + def _transform_generic_data(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Generic transformation for unknown collections.""" + # Convert MongoDB field names to snake_case and handle basic types + transformed = {} + for key, value in data.items(): + if key == "_id": + continue # Skip MongoDB _id field + + # Convert camelCase to snake_case + snake_key = "".join(["_" + c.lower() if c.isupper() else c for c in key]).lstrip("_") + + # Handle ObjectId conversion + if hasattr(value, "__str__") and len(str(value)) == 24: + transformed[snake_key] = str(value) + else: + transformed[snake_key] = value + + return transformed + + def _record_sync_failure(self, collection_name: str, mongo_id: str, error: str): + """Record a sync failure for alerting purposes.""" + failure_record = { + "collection": collection_name, + "mongo_id": mongo_id, + "error": error, + "timestamp": timezone.now(), + } + self.sync_failures.append(failure_record) + + # Log the failure + logger.error(f"Sync failure recorded: {failure_record}") + + # TODO: Implement alerting mechanism (email, Slack, etc.) + self._send_alert(failure_record) + + def _send_alert(self, failure_record: Dict[str, Any]): + """Send alert for sync failure.""" + # TODO: Implement actual alerting (email, Slack, etc.) + logger.critical(f"ALERT: Sync failure detected - {failure_record}") + + # For now, just log. In production, this would send emails/Slack messages + pass + + def get_sync_failures(self) -> list: + """Get list of recent sync failures.""" + return self.sync_failures.copy() + + def clear_sync_failures(self): + """Clear the sync failures list.""" + self.sync_failures.clear() diff --git a/todo/services/enhanced_dual_write_service.py b/todo/services/enhanced_dual_write_service.py new file mode 100644 index 00000000..cc321880 --- /dev/null +++ b/todo/services/enhanced_dual_write_service.py @@ -0,0 +1,179 @@ +import logging +from typing import Any, Dict, Optional +from django.conf import settings + +from todo.services.dual_write_service import DualWriteService + +logger = logging.getLogger(__name__) + + +class EnhancedDualWriteService(DualWriteService): + """ + Enhanced dual-write service that provides additional functionality. + Extends the base DualWriteService with batch operations and enhanced monitoring. + """ + + def __init__(self): + super().__init__() + self.enabled = getattr(settings, "DUAL_WRITE_ENABLED", True) + + def create_document(self, collection_name: str, data: Dict[str, Any], mongo_id: str) -> bool: + """ + Create a document in both MongoDB and Postgres. + """ + if not self.enabled: + logger.debug("Dual-write is disabled, skipping Postgres sync") + return True + + return super().create_document(collection_name, data, mongo_id) + + def update_document(self, collection_name: str, mongo_id: str, data: Dict[str, Any]) -> bool: + """ + Update a document in both MongoDB and Postgres. + """ + if not self.enabled: + logger.debug("Dual-write is disabled, skipping Postgres sync") + return True + + return super().update_document(collection_name, mongo_id, data) + + def delete_document(self, collection_name: str, mongo_id: str) -> bool: + """ + Delete a document from both MongoDB and Postgres. + """ + if not self.enabled: + logger.debug("Dual-write is disabled, skipping Postgres sync") + return True + + return super().delete_document(collection_name, mongo_id) + + def batch_operations(self, operations: list) -> bool: + """ + Perform multiple operations in batch. + """ + if not self.enabled: + logger.debug("Dual-write is disabled, skipping Postgres sync") + return True + + return self._batch_operations_sync(operations) + + def _batch_operations_sync(self, operations: list) -> bool: + """Perform batch operations synchronously.""" + success_count = 0 + failure_count = 0 + + for op in operations: + try: + collection_name = op["collection_name"] + data = op.get("data", {}) + mongo_id = op["mongo_id"] + operation = op["operation"] + + if operation == "create": + success = super().create_document(collection_name, data, mongo_id) + elif operation == "update": + success = super().update_document(collection_name, mongo_id, data) + elif operation == "delete": + success = super().delete_document(collection_name, mongo_id) + else: + logger.error(f"Unknown operation: {operation}") + failure_count += 1 + continue + + if success: + success_count += 1 + else: + failure_count += 1 + + except Exception as e: + logger.error(f"Error processing operation {op}: {str(e)}") + failure_count += 1 + + logger.info(f"Batch sync completed. Success: {success_count}, Failures: {failure_count}") + return failure_count == 0 + + def get_sync_status(self, collection_name: str, mongo_id: str) -> Optional[str]: + """ + Get the sync status of a document in Postgres. + + Args: + collection_name: Name of the MongoDB collection + mongo_id: MongoDB ObjectId as string + + Returns: + str: Sync status or None if not found + """ + try: + postgres_model = self._get_postgres_model(collection_name) + if not postgres_model: + return None + + instance = postgres_model.objects.get(mongo_id=mongo_id) + return instance.sync_status + except postgres_model.DoesNotExist: + return None + except Exception as e: + logger.error(f"Error getting sync status for {collection_name}:{mongo_id}: {str(e)}") + return None + + def get_sync_metrics(self) -> Dict[str, Any]: + """ + Get metrics about sync operations. + + Returns: + Dict: Sync metrics + """ + try: + metrics = { + "total_failures": len(self.sync_failures), + "failures_by_collection": {}, + "recent_failures": self.sync_failures[-10:] if self.sync_failures else [], + "enabled": self.enabled, + } + + # Count failures by collection + for failure in self.sync_failures: + collection = failure["collection"] + if collection not in metrics["failures_by_collection"]: + metrics["failures_by_collection"][collection] = 0 + metrics["failures_by_collection"][collection] += 1 + + return metrics + except Exception as e: + logger.error(f"Error getting sync metrics: {str(e)}") + return {} + + def retry_failed_sync(self, collection_name: str, mongo_id: str) -> bool: + """ + Retry a failed sync operation. + + Args: + collection_name: Name of the MongoDB collection + mongo_id: MongoDB ObjectId as string + + Returns: + bool: True if retry was successful, False otherwise + """ + try: + # Find the failure record + failure_record = None + for failure in self.sync_failures: + if failure["collection"] == collection_name and failure["mongo_id"] == mongo_id: + failure_record = failure + break + + if not failure_record: + logger.warning(f"No failure record found for {collection_name}:{mongo_id}") + return False + + # Remove from failures list + self.sync_failures.remove(failure_record) + + # Retry the operation (this would need the original data) + # For now, just log the retry attempt + logger.info(f"Retrying sync for {collection_name}:{mongo_id}") + + return True + except Exception as e: + logger.error(f"Error retrying failed sync for {collection_name}:{mongo_id}: {str(e)}") + return False diff --git a/todo/services/google_oauth_service.py b/todo/services/google_oauth_service.py new file mode 100644 index 00000000..0cce215a --- /dev/null +++ b/todo/services/google_oauth_service.py @@ -0,0 +1,104 @@ +import requests +import secrets +from urllib.parse import urlencode +from django.conf import settings + +from todo.exceptions.auth_exceptions import ( + APIException, + AuthException, +) +from todo.constants.messages import ApiErrors + + +class GoogleOAuthService: + GOOGLE_AUTH_URL = "https://accounts.google.com/o/oauth2/v2/auth" + GOOGLE_TOKEN_URL = "https://oauth2.googleapis.com/token" + GOOGLE_USER_INFO_URL = "https://www.googleapis.com/oauth2/v2/userinfo" + + @classmethod + def get_authorization_url(cls, redirect_url: str | None = None) -> tuple[str, str]: + try: + state = secrets.token_urlsafe(32) + + params = { + "client_id": settings.GOOGLE_OAUTH["CLIENT_ID"], + "redirect_uri": redirect_url or settings.GOOGLE_OAUTH["REDIRECT_URI"], + "response_type": "code", + "scope": "openid email profile", + "access_type": "offline", + "prompt": "consent", + "state": state, + } + + auth_url = f"{cls.GOOGLE_AUTH_URL}?{urlencode(params)}" + return auth_url, state + + except Exception: + raise AuthException(ApiErrors.GOOGLE_AUTH_FAILED) + + @classmethod + def handle_callback(cls, authorization_code: str) -> dict: + try: + tokens = cls._exchange_code_for_tokens(authorization_code) + + user_info = cls._get_user_info(tokens["access_token"]) + + return { + "google_id": user_info["id"], + "email": user_info["email"], + "name": user_info["name"], + "picture": user_info.get("picture"), + } + + except Exception as e: + if isinstance(e, APIException): + raise + raise APIException(ApiErrors.GOOGLE_API_ERROR) from e + + @classmethod + def _exchange_code_for_tokens(cls, code: str) -> dict: + try: + data = { + "client_id": settings.GOOGLE_OAUTH["CLIENT_ID"], + "client_secret": settings.GOOGLE_OAUTH["CLIENT_SECRET"], + "code": code, + "grant_type": "authorization_code", + "redirect_uri": settings.GOOGLE_OAUTH["REDIRECT_URI"], + } + + response = requests.post(cls.GOOGLE_TOKEN_URL, data=data, timeout=30) + + if response.status_code != 200: + raise APIException(ApiErrors.TOKEN_EXCHANGE_FAILED) + + tokens = response.json() + + if "error" in tokens: + raise APIException(ApiErrors.GOOGLE_API_ERROR) + + return tokens + + except requests.exceptions.RequestException: + raise APIException(ApiErrors.GOOGLE_API_ERROR) + + @classmethod + def _get_user_info(cls, access_token: str) -> dict: + try: + headers = {"Authorization": f"Bearer {access_token}"} + response = requests.get(cls.GOOGLE_USER_INFO_URL, headers=headers, timeout=30) + + if response.status_code != 200: + raise APIException(ApiErrors.USER_INFO_FETCH_FAILED.format("HTTP error")) + + user_info = response.json() + + required_fields = ["id", "email", "name"] + missing_fields = [field for field in required_fields if field not in user_info] + + if missing_fields: + raise APIException(ApiErrors.MISSING_USER_INFO_FIELDS.format(", ".join(missing_fields))) + + return user_info + + except requests.exceptions.RequestException: + raise APIException(ApiErrors.GOOGLE_API_ERROR) diff --git a/todo/services/label_service.py b/todo/services/label_service.py new file mode 100644 index 00000000..07346410 --- /dev/null +++ b/todo/services/label_service.py @@ -0,0 +1,111 @@ +from dataclasses import dataclass +from django.conf import settings +from django.urls import reverse_lazy +from urllib.parse import urlencode + +from todo.dto.responses.paginated_response import LinksData +from todo.repositories.label_repository import LabelRepository +from todo.dto.responses.get_labels_response import GetLabelsResponse +from todo.models.label import LabelModel +from todo.dto.label_dto import LabelDTO +from todo.constants.messages import ApiErrors + + +@dataclass +class PaginationConfig: + DEFAULT_PAGE: int = 1 + DEFAULT_LIMIT: int = 10 + MAX_LIMIT: int = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"] + SEARCH: str = "" + + +class LabelService: + @classmethod + def get_labels( + cls, + page: int = PaginationConfig.DEFAULT_PAGE, + limit: int = PaginationConfig.DEFAULT_LIMIT, + search=PaginationConfig.SEARCH, + ) -> GetLabelsResponse: + try: + [total_count, labels] = LabelRepository.get_all(page, limit, search) + total_pages = (total_count + limit - 1) // limit + + if total_count > 0 and page > total_pages: + return GetLabelsResponse( + labels=[], + limit=limit, + links=None, + error={"message": ApiErrors.PAGE_NOT_FOUND, "code": "PAGE_NOT_FOUND"}, + ) + if not labels: + return GetLabelsResponse( + labels=[], + total=total_count, + page=page, + limit=limit, + links=None, + ) + + label_dtos = [cls.prepare_label_dto(label) for label in labels] + + links = cls.prepare_pagination_links(page=page, total_pages=total_pages, limit=limit, search=search) + + return GetLabelsResponse(labels=label_dtos, total=total_count, page=page, limit=limit, links=links) + + except Exception: + return GetLabelsResponse( + labels=[], + limit=limit, + links=None, + error={"message": ApiErrors.UNEXPECTED_ERROR_OCCURRED, "code": "INTERNAL_ERROR"}, + ) + + @classmethod + def prepare_pagination_links(cls, page: int, total_pages: int, limit: int, search: str) -> LinksData: + next_link = None + prev_link = None + + if page < total_pages: + next_page = page + 1 + next_link = cls.build_page_url(next_page, limit, search) + + if page > 1: + prev_page = page - 1 + prev_link = cls.build_page_url(prev_page, limit, search) + + return LinksData(next=next_link, prev=prev_link) + + @classmethod + def build_page_url(cls, page: int, limit: int, search: str) -> str: + base_url = reverse_lazy("labels") + query_params = urlencode({"page": page, "limit": limit, "search": search}) + return f"{base_url}?{query_params}" + + @classmethod + def prepare_label_dto(cls, label_model: LabelModel) -> LabelDTO: + from todo.dto.user_dto import UserDTO + + created_by_dto = None + if label_model.createdBy: + if label_model.createdBy == "system": + created_by_dto = UserDTO(id="system", name="System") + else: + created_by_dto = UserDTO(id=label_model.createdBy, name="User") + + updated_by_dto = None + if label_model.updatedBy: + if label_model.updatedBy == "system": + updated_by_dto = UserDTO(id="system", name="System") + else: + updated_by_dto = UserDTO(id=label_model.updatedBy, name="User") + + return LabelDTO( + id=str(label_model.id), + name=label_model.name, + color=label_model.color, + createdAt=label_model.createdAt, + updatedAt=label_model.updatedAt, + createdBy=created_by_dto, + updatedBy=updated_by_dto, + ) diff --git a/todo/services/postgres_sync_service.py b/todo/services/postgres_sync_service.py new file mode 100644 index 00000000..1e6a79be --- /dev/null +++ b/todo/services/postgres_sync_service.py @@ -0,0 +1,228 @@ +import logging +from django.db import connection +from django.conf import settings + +from todo_project.db.config import DatabaseManager +from todo.services.dual_write_service import DualWriteService + +logger = logging.getLogger(__name__) + + +class PostgresSyncService: + """ + Service to synchronize PostgreSQL tables with MongoDB data. + Checks if tables exist and copies data from MongoDB if needed. + Currently handles labels and roles tables only. + """ + + def __init__(self): + self.db_manager = DatabaseManager() + self.dual_write_service = DualWriteService() + self.enabled = getattr(settings, "POSTGRES_SYNC_ENABLED", True) + + def sync_all_tables(self) -> bool: + """ + Synchronize labels and roles PostgreSQL tables with MongoDB data. + + Returns: + bool: True if all syncs completed successfully, False otherwise + """ + if not self.enabled: + logger.info("PostgreSQL sync is disabled, skipping") + return True + + logger.info("Starting PostgreSQL table synchronization for labels and roles") + logger.info(f"PostgreSQL sync enabled: {self.enabled}") + + sync_operations = [ + ("labels", self._sync_labels_table), + ("roles", self._sync_roles_table), + ] + + success_count = 0 + total_operations = len(sync_operations) + + for table_name, sync_func in sync_operations: + try: + logger.info(f"Syncing table: {table_name}") + if sync_func(): + logger.info(f"Successfully synced table: {table_name}") + success_count += 1 + else: + logger.error(f"Failed to sync table: {table_name}") + except Exception as e: + logger.error(f"Error syncing table {table_name}: {str(e)}") + + logger.info(f"PostgreSQL sync completed - {success_count}/{total_operations} tables synced successfully") + return success_count == total_operations + + def _check_table_exists(self, table_name: str) -> bool: + """ + Check if a PostgreSQL table exists. + + Args: + table_name: Name of the table to check + + Returns: + bool: True if table exists, False otherwise + """ + try: + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name = %s + ); + """, + [table_name], + ) + return cursor.fetchone()[0] + except Exception as e: + logger.error(f"Error checking if table {table_name} exists: {str(e)}") + return False + + def _get_mongo_collection_count(self, collection_name: str) -> int: + """ + Get the count of documents in a MongoDB collection. + + Args: + collection_name: Name of the MongoDB collection + + Returns: + int: Number of documents in the collection + """ + try: + collection = self.db_manager.get_collection(collection_name) + + # Labels use isDeleted field for soft deletes + if collection_name == "labels": + return collection.count_documents({"isDeleted": {"$ne": True}}) + else: + # For roles and other collections without soft delete, count all documents + return collection.count_documents({}) + + except Exception as e: + logger.error(f"Error getting count for collection {collection_name}: {str(e)}") + return 0 + + def _get_postgres_table_count(self, table_name: str) -> int: + """ + Get the count of records in a PostgreSQL table. + + Args: + table_name: Name of the PostgreSQL table + + Returns: + int: Number of records in the table + """ + try: + with connection.cursor() as cursor: + cursor.execute(f"SELECT COUNT(*) FROM {table_name};") + return cursor.fetchone()[0] + except Exception as e: + logger.error(f"Error getting count for table {table_name}: {str(e)}") + return 0 + + def _sync_labels_table(self) -> bool: + """Synchronize the labels table.""" + table_name = "postgres_labels" + + if not self._check_table_exists(table_name): + logger.warning(f"Table {table_name} does not exist, skipping sync") + return True + + mongo_count = self._get_mongo_collection_count("labels") + postgres_count = self._get_postgres_table_count(table_name) + + if postgres_count >= mongo_count: + logger.info(f"Labels table already has {postgres_count} records, MongoDB has {mongo_count}. Skipping sync.") + return True + + logger.info(f"Syncing labels: MongoDB has {mongo_count} records, PostgreSQL has {postgres_count} records") + logger.info(f"Will sync {mongo_count - postgres_count} labels to PostgreSQL") + + try: + collection = self.db_manager.get_collection("labels") + labels = collection.find({"isDeleted": {"$ne": True}}) + + synced_count = 0 + for label in labels: + try: + # Check if label already exists in PostgreSQL + from todo.models.postgres.label import PostgresLabel + + existing = PostgresLabel.objects.filter(mongo_id=str(label["_id"])).first() + if existing: + continue + + # Transform data for PostgreSQL + postgres_data = self.dual_write_service._transform_label_data(label) + postgres_data["mongo_id"] = str(label["_id"]) + postgres_data["sync_status"] = "SYNCED" + + logger.debug(f"Creating label in PostgreSQL: {postgres_data}") + + # Create in PostgreSQL + PostgresLabel.objects.create(**postgres_data) + synced_count += 1 + + except Exception as e: + logger.error(f"Error syncing label {label.get('_id')}: {str(e)}") + continue + + logger.info(f"Successfully synced {synced_count} labels to PostgreSQL") + return True + + except Exception as e: + logger.error(f"Error syncing labels table: {str(e)}") + return False + + def _sync_roles_table(self) -> bool: + """Synchronize the roles table.""" + table_name = "postgres_roles" + + if not self._check_table_exists(table_name): + logger.warning(f"Table {table_name} does not exist, skipping sync") + return True + + mongo_count = self._get_mongo_collection_count("roles") + postgres_count = self._get_postgres_table_count(table_name) + + if postgres_count >= mongo_count: + logger.info(f"Roles table already has {postgres_count} records, MongoDB has {mongo_count}. Skipping sync.") + return True + + logger.info(f"Syncing roles: MongoDB has {mongo_count} records, PostgreSQL has {postgres_count} records") + + try: + collection = self.db_manager.get_collection("roles") + roles = collection.find({}) + + synced_count = 0 + for role in roles: + try: + from todo.models.postgres.role import PostgresRole + + existing = PostgresRole.objects.filter(mongo_id=str(role["_id"])).first() + if existing: + continue + + postgres_data = self.dual_write_service._transform_role_data(role) + postgres_data["mongo_id"] = str(role["_id"]) + postgres_data["sync_status"] = "SYNCED" + + PostgresRole.objects.create(**postgres_data) + synced_count += 1 + + except Exception as e: + logger.error(f"Error syncing role {role.get('_id')}: {str(e)}") + continue + + logger.info(f"Successfully synced {synced_count} roles to PostgreSQL") + return True + + except Exception as e: + logger.error(f"Error syncing roles table: {str(e)}") + return False diff --git a/todo/services/role_service.py b/todo/services/role_service.py new file mode 100644 index 00000000..042e24b4 --- /dev/null +++ b/todo/services/role_service.py @@ -0,0 +1,27 @@ +from typing import List, Dict, Any, Optional + +from todo.repositories.role_repository import RoleRepository +from todo.dto.role_dto import RoleDTO +from todo.exceptions.role_exceptions import ( + RoleNotFoundException, + RoleOperationException, +) + + +class RoleService: + @classmethod + def get_all_roles(cls, filters: Optional[Dict[str, Any]] = None) -> List[RoleDTO]: + """Get all roles with optional filtering.""" + try: + role_models = RoleRepository.list_all(filters=filters) + return [RoleDTO.from_model(role) for role in role_models] + except Exception as e: + raise RoleOperationException(f"Failed to get roles: {str(e)}") + + @classmethod + def get_role_by_id(cls, role_id: str) -> RoleDTO: + """Get a single role by ID.""" + role_model = RoleRepository.get_by_id(role_id) + if not role_model: + raise RoleNotFoundException(role_id) + return RoleDTO.from_model(role_model) diff --git a/todo/services/task_assignment_service.py b/todo/services/task_assignment_service.py new file mode 100644 index 00000000..5b530f6b --- /dev/null +++ b/todo/services/task_assignment_service.py @@ -0,0 +1,152 @@ +from typing import Optional + +from todo.dto.task_assignment_dto import TaskAssignmentResponseDTO, CreateTaskAssignmentDTO +from todo.dto.responses.create_task_assignment_response import CreateTaskAssignmentResponse +from todo.models.common.pyobjectid import PyObjectId +from todo.models.task_assignment import TaskAssignmentModel +from todo.repositories.task_assignment_repository import TaskAssignmentRepository +from todo.repositories.task_repository import TaskRepository +from todo.repositories.user_repository import UserRepository +from todo.repositories.team_repository import TeamRepository +from todo.exceptions.user_exceptions import UserNotFoundException +from todo.exceptions.task_exceptions import TaskNotFoundException +from todo.dto.task_assignment_dto import TaskAssignmentDTO +from todo.models.audit_log import AuditLogModel +from todo.repositories.audit_log_repository import AuditLogRepository + + +class TaskAssignmentService: + @classmethod + def create_task_assignment(cls, dto: CreateTaskAssignmentDTO, user_id: str) -> CreateTaskAssignmentResponse: + """ + Create a new task assignment with validation for task, user, and team existence. + """ + # Validate task exists + task = TaskRepository.get_by_id(dto.task_id) + if not task: + raise TaskNotFoundException(dto.task_id) + + # Validate assignee exists based on user_type + if dto.user_type == "user": + assignee = UserRepository.get_by_id(dto.assignee_id) + if not assignee: + raise UserNotFoundException(dto.assignee_id) + elif dto.user_type == "team": + assignee = TeamRepository.get_by_id(dto.assignee_id) + if not assignee: + raise ValueError(f"Team not found: {dto.assignee_id}") + else: + raise ValueError("Invalid user_type") + + # Check if task already has an active assignment + existing_assignment = TaskAssignmentRepository.get_by_task_id(dto.task_id) + if existing_assignment: + # If previous assignment was to a team, log unassignment + if existing_assignment.user_type == "team": + AuditLogRepository.create( + AuditLogModel( + task_id=existing_assignment.task_id, + team_id=existing_assignment.assignee_id, + action="unassigned_from_team", + performed_by=PyObjectId(user_id), + ) + ) + # Update existing assignment + updated_assignment = TaskAssignmentRepository.update_assignment( + dto.task_id, dto.assignee_id, dto.user_type, user_id + ) + if not updated_assignment: + raise ValueError("Failed to update task assignment") + assignment = updated_assignment + + else: + # Create new assignment + task_assignment = TaskAssignmentModel( + task_id=PyObjectId(dto.task_id), + assignee_id=PyObjectId(dto.assignee_id), + user_type=dto.user_type, + created_by=PyObjectId(user_id), + updated_by=None, + team_id=PyObjectId(dto.team_id) if dto.team_id else None, + ) + assignment = TaskAssignmentRepository.create(task_assignment) + + # If new assignment is to a team, log assignment + if assignment.user_type == "team": + AuditLogRepository.create( + AuditLogModel( + task_id=assignment.task_id, + team_id=assignment.assignee_id, + action="assigned_to_team", + performed_by=PyObjectId(user_id), + ) + ) + + # Also insert into assignee_task_details if this is a team assignment (legacy, can be removed if not needed) + # if dto.user_type == "team": + # TaskAssignmentRepository.create( + # TaskAssignmentModel( + # assignee_id=PyObjectId(dto.assignee_id), + # task_id=PyObjectId(dto.task_id), + # user_type="team", + # is_active=True, + # created_by=PyObjectId(user_id), + # updated_by=None, + # ) + # ) + + # Prepare response + response_dto = TaskAssignmentDTO( + id=str(assignment.id), + task_id=str(assignment.task_id), + assignee_id=str(assignment.assignee_id), + user_type=assignment.user_type, + executor_id=str(assignment.executor_id) if assignment.executor_id else None, + is_active=assignment.is_active, + created_by=str(assignment.created_by), + updated_by=str(assignment.updated_by) if assignment.updated_by else None, + created_at=assignment.created_at, + updated_at=assignment.updated_at, + ) + + return CreateTaskAssignmentResponse(data=response_dto) + + @classmethod + def get_task_assignment(cls, task_id: str) -> Optional[TaskAssignmentResponseDTO]: + """ + Get task assignment by task ID. + """ + assignment = TaskAssignmentRepository.get_by_task_id(task_id) + if not assignment: + return None + + # Get assignee name + if assignment.user_type == "user": + assignee = UserRepository.get_by_id(str(assignment.assignee_id)) + assignee_name = assignee.name if assignee else "Unknown User" + elif assignment.user_type == "team": + assignee = TeamRepository.get_by_id(str(assignment.assignee_id)) + assignee_name = assignee.name if assignee else "Unknown Team" + else: + assignee_name = "Unknown" + + return TaskAssignmentResponseDTO( + id=str(assignment.id), + task_id=str(assignment.task_id), + assignee_id=str(assignment.assignee_id), + user_type=assignment.user_type, + assignee_name=assignee_name, + executor_id=str(assignment.executor_id) if assignment.executor_id else None, + is_active=assignment.is_active, + created_by=str(assignment.created_by), + updated_by=str(assignment.updated_by) if assignment.updated_by else None, + created_at=assignment.created_at, + updated_at=assignment.updated_at, + ) + + @classmethod + def delete_task_assignment(cls, task_id: str, user_id: str) -> bool: + """ + Delete task assignment by task ID. + """ + return TaskAssignmentRepository.delete_assignment(task_id, user_id) diff --git a/todo/services/task_service.py b/todo/services/task_service.py index 70de8c86..18444b20 100644 --- a/todo/services/task_service.py +++ b/todo/services/task_service.py @@ -1,24 +1,51 @@ from typing import List from dataclasses import dataclass -from django.core.paginator import Paginator, EmptyPage from django.core.exceptions import ValidationError from django.urls import reverse_lazy from urllib.parse import urlencode from datetime import datetime, timezone - +from todo.dto.deferred_details_dto import DeferredDetailsDTO from todo.dto.label_dto import LabelDTO from todo.dto.task_dto import TaskDTO, CreateTaskDTO +from todo.dto.task_assignment_dto import CreateTaskAssignmentDTO from todo.dto.user_dto import UserDTO from todo.dto.responses.get_tasks_response import GetTasksResponse from todo.dto.responses.create_task_response import CreateTaskResponse -from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail, ApiErrorSource + +from todo.dto.responses.error_response import ( + ApiErrorResponse, + ApiErrorDetail, + ApiErrorSource, +) from todo.dto.responses.paginated_response import LinksData -from todo.models.task import TaskModel +from todo.exceptions.user_exceptions import UserNotFoundException +from todo.models.task import TaskModel, DeferredDetailsModel +from todo.models.task_assignment import TaskAssignmentModel +from todo.repositories.task_assignment_repository import TaskAssignmentRepository +from todo.dto.task_assignment_dto import TaskAssignmentDTO +from todo.models.common.pyobjectid import PyObjectId from todo.repositories.task_repository import TaskRepository from todo.repositories.label_repository import LabelRepository -from todo.constants.task import TaskStatus +from todo.repositories.team_repository import TeamRepository +from todo.constants.task import ( + TaskStatus, + TaskPriority, +) from todo.constants.messages import ApiErrors, ValidationErrors from django.conf import settings +from todo.exceptions.task_exceptions import ( + TaskNotFoundException, + UnprocessableEntityException, + TaskStateConflictException, +) +from bson.errors import InvalidId as BsonInvalidId + +from todo.repositories.user_repository import UserRepository +from todo.repositories.watchlist_repository import WatchlistRepository +import math +from todo.models.audit_log import AuditLogModel +from todo.repositories.audit_log_repository import AuditLogRepository +from todo.services.task_assignment_service import TaskAssignmentService @dataclass @@ -29,42 +56,71 @@ class PaginationConfig: class TaskService: + DIRECT_ASSIGNMENT_FIELDS = { + "title", + "description", + "dueAt", + "startedAt", + "isAcknowledged", + } + @classmethod def get_tasks( - cls, page: int = PaginationConfig.DEFAULT_PAGE, limit: int = PaginationConfig.DEFAULT_LIMIT + cls, + page: int, + limit: int, + sort_by: str, + order: str, + user_id: str, + team_id: str = None, + status_filter: str = None, ) -> GetTasksResponse: try: cls._validate_pagination_params(page, limit) - tasks = TaskRepository.get_all() + # If team_id is provided, only allow team members to fetch tasks + if team_id: + from todo.repositories.team_repository import TeamRepository + + if not TeamRepository.is_user_team_member(team_id, user_id): + return GetTasksResponse( + tasks=[], + links=None, + error={ + "message": "Only team members can view team tasks.", + "code": "FORBIDDEN", + }, + ) + + tasks = TaskRepository.list( + page, limit, sort_by, order, user_id, team_id=team_id, status_filter=status_filter + ) + total_count = TaskRepository.count(user_id, team_id=team_id, status_filter=status_filter) if not tasks: return GetTasksResponse(tasks=[], links=None) - paginator = Paginator(tasks, limit) - - try: - current_page = paginator.page(page) + task_dtos = [cls.prepare_task_dto(task, user_id) for task in tasks] - task_dtos = [cls.prepare_task_dto(task) for task in current_page.object_list] + links = cls._build_pagination_links(page, limit, total_count, sort_by, order) - links = cls._prepare_pagination_links(current_page=current_page, page=page, limit=limit) - - return GetTasksResponse(tasks=task_dtos, links=links) - - except EmptyPage: - return GetTasksResponse( - tasks=[], - links=None, - error={"message": "Requested page exceeds available results", "code": "PAGE_NOT_FOUND"}, - ) + return GetTasksResponse(tasks=task_dtos, links=links) except ValidationError as e: - return GetTasksResponse(tasks=[], links=None, error={"message": str(e), "code": "VALIDATION_ERROR"}) + return GetTasksResponse( + tasks=[], + links=None, + error={"message": str(e), "code": "VALIDATION_ERROR"}, + ) except Exception: return GetTasksResponse( - tasks=[], links=None, error={"message": "An unexpected error occurred", "code": "INTERNAL_ERROR"} + tasks=[], + links=None, + error={ + "message": ApiErrors.UNEXPECTED_ERROR_OCCURRED, + "code": "INTERNAL_ERROR", + }, ) @classmethod @@ -79,46 +135,65 @@ def _validate_pagination_params(cls, page: int, limit: int) -> None: raise ValidationError(f"Maximum limit of {PaginationConfig.MAX_LIMIT} exceeded") @classmethod - def _prepare_pagination_links(cls, current_page, page: int, limit: int) -> LinksData: + def _build_pagination_links(cls, page: int, limit: int, total_count: int, sort_by: str, order: str) -> LinksData: + """Build pagination links with sort parameters""" + + total_pages = math.ceil(total_count / limit) next_link = None prev_link = None - if current_page.has_next(): - next_page = current_page.next_page_number() - next_link = cls.build_page_url(next_page, limit) + if page < total_pages: + next_link = cls.build_page_url(page + 1, limit, sort_by, order) - if current_page.has_previous(): - prev_page = current_page.previous_page_number() - prev_link = cls.build_page_url(prev_page, limit) + if page > 1: + prev_link = cls.build_page_url(page - 1, limit, sort_by, order) return LinksData(next=next_link, prev=prev_link) @classmethod - def build_page_url(cls, page: int, limit: int) -> str: + def build_page_url(cls, page: int, limit: int, sort_by: str, order: str) -> str: base_url = reverse_lazy("tasks") - query_params = urlencode({"page": page, "limit": limit}) + query_params = urlencode({"page": page, "limit": limit, "sort_by": sort_by, "order": order}) return f"{base_url}?{query_params}" @classmethod - def prepare_task_dto(cls, task_model: TaskModel) -> TaskDTO: + def prepare_task_dto(cls, task_model: TaskModel, user_id: str = None) -> TaskDTO: label_dtos = cls._prepare_label_dtos(task_model.labels) if task_model.labels else [] - - assignee = cls.prepare_user_dto(task_model.assignee) if task_model.assignee else None - created_by = cls.prepare_user_dto(task_model.createdBy) + created_by = cls.prepare_user_dto(task_model.createdBy) if task_model.createdBy else None updated_by = cls.prepare_user_dto(task_model.updatedBy) if task_model.updatedBy else None + deferred_details = ( + cls.prepare_deferred_details_dto(task_model.deferredDetails) if task_model.deferredDetails else None + ) + + assignee_details = TaskAssignmentRepository.get_by_task_id(str(task_model.id)) + assignee_dto = cls._prepare_assignee_dto(assignee_details) if assignee_details else None + + # Check if task is in user's watchlist + in_watchlist = None + if user_id: + watchlist_entry = WatchlistRepository.get_by_user_and_task(user_id, str(task_model.id)) + if watchlist_entry: + in_watchlist = watchlist_entry.isActive + + task_status = task_model.status + + if task_model.deferredDetails and task_model.deferredDetails.deferredTill > datetime.now(timezone.utc): + task_status = TaskStatus.DEFERRED.value return TaskDTO( id=str(task_model.id), displayId=task_model.displayId, title=task_model.title, description=task_model.description, - assignee=assignee, + assignee=assignee_dto, isAcknowledged=task_model.isAcknowledged, labels=label_dtos, startedAt=task_model.startedAt, dueAt=task_model.dueAt, - status=task_model.status, + status=task_status, priority=task_model.priority, + deferredDetails=deferred_details, + in_watchlist=in_watchlist, createdAt=task_model.createdAt, updatedAt=task_model.updatedAt, createdBy=created_by, @@ -131,65 +206,444 @@ def _prepare_label_dtos(cls, label_ids: List[str]) -> List[LabelDTO]: return [ LabelDTO( + id=str(label_model.id), name=label_model.name, color=label_model.color, - createdAt=label_model.createdAt, - updatedAt=label_model.updatedAt if hasattr(label_model, "updatedAt") else None, - createdBy=cls.prepare_user_dto(label_model.createdBy), - updatedBy=cls.prepare_user_dto(label_model.updatedBy) - if hasattr(label_model, "updatedBy") and label_model.updatedBy - else None, ) for label_model in label_models ] + @classmethod + def _prepare_assignee_dto(cls, assignee_details: TaskAssignmentModel) -> TaskAssignmentDTO: + """Prepare assignee DTO from assignee task details.""" + assignee_id = str(assignee_details.assignee_id) + + # Get assignee details based on user_type + if assignee_details.user_type == "user": + assignee = UserRepository.get_by_id(assignee_id) + elif assignee_details.user_type == "team": + assignee = TeamRepository.get_by_id(assignee_id) + else: + return None + + if not assignee: + return None + + return TaskAssignmentDTO( + id=str(assignee_details.id), + task_id=str(assignee_details.task_id), + assignee_id=assignee_id, + assignee_name=assignee.name, + user_type=assignee_details.user_type, + executor_id=str(assignee_details.executor_id) if assignee_details.executor_id else None, + team_id=str(assignee_details.team_id) if assignee_details.team_id else None, + is_active=assignee_details.is_active, + created_by=str(assignee_details.created_by), + updated_by=str(assignee_details.updated_by) if assignee_details.updated_by else None, + created_at=assignee_details.created_at, + updated_at=assignee_details.updated_at, + ) + + @classmethod + def prepare_deferred_details_dto(cls, deferred_details_model: DeferredDetailsModel) -> DeferredDetailsDTO | None: + if not deferred_details_model: + return None + + deferred_by_user = cls.prepare_user_dto(deferred_details_model.deferredBy) + + return DeferredDetailsDTO( + deferredAt=deferred_details_model.deferredAt, + deferredTill=deferred_details_model.deferredTill, + deferredBy=deferred_by_user, + ) + @classmethod def prepare_user_dto(cls, user_id: str) -> UserDTO: - return UserDTO(id=user_id, name="SYSTEM") + user = UserRepository.get_by_id(user_id) + if user: + return UserDTO(id=str(user_id), name=user.name) + raise UserNotFoundException(user_id) + + @classmethod + def get_task_by_id(cls, task_id: str) -> TaskDTO: + try: + task_model = TaskRepository.get_by_id(task_id) + if not task_model: + raise TaskNotFoundException(task_id) + return cls.prepare_task_dto(task_model, user_id=None) + except BsonInvalidId as exc: + raise exc + + @classmethod + def _process_labels_for_update(cls, raw_labels: list | None) -> list[PyObjectId]: + if raw_labels is None: + return [] + + label_object_ids = [PyObjectId(label_id_str) for label_id_str in raw_labels] + return label_object_ids + + @classmethod + def _process_enum_for_update(cls, enum_type: type, value: str | None) -> str | None: + if value is None: + return None + return enum_type[value].value + + @classmethod + def update_task(cls, task_id: str, validated_data: dict, user_id: str) -> TaskDTO: + current_task = TaskRepository.get_by_id(task_id) + + if not current_task: + raise TaskNotFoundException(task_id) + + # Check if user is the creator + if current_task.createdBy != user_id: + # Check if user is assigned to this task + assigned_task_ids = TaskRepository._get_assigned_task_ids_for_user(user_id) + if current_task.id not in assigned_task_ids: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + + # Handle assignee updates if provided + if validated_data.get("assignee"): + assignee_info = validated_data["assignee"] + assignee_id = assignee_info.get("assignee_id") + user_type = assignee_info.get("user_type") + + if user_type == "user": + assignee_data = UserRepository.get_by_id(assignee_id) + if not assignee_data: + raise UserNotFoundException(assignee_id) + elif user_type == "team": + team_data = TeamRepository.get_by_id(assignee_id) + if not team_data: + raise ValueError(f"Team not found: {assignee_id}") + + # Track status change for audit log + old_status = getattr(current_task, "status", None) + new_status = validated_data.get("status") + + update_payload = {} + enum_fields = {"priority": TaskPriority, "status": TaskStatus} + + for field, value in validated_data.items(): + if field == "labels": + update_payload[field] = cls._process_labels_for_update( + value + ) # Only convert to ObjectId, do not check existence + elif field in enum_fields: + update_payload[field] = cls._process_enum_for_update(enum_fields[field], value) + elif field in cls.DIRECT_ASSIGNMENT_FIELDS: + update_payload[field] = value + + # Handle assignee updates separately + if "assignee" in validated_data: + assignee_info = validated_data["assignee"] + TaskAssignmentRepository.update_assignee( + task_id, + assignee_info["assignee_id"], + assignee_info["user_type"], + user_id, + ) + + if not update_payload: + return cls.prepare_task_dto(current_task, user_id) + + update_payload["updatedBy"] = user_id + updated_task = TaskRepository.update(task_id, update_payload) + + # Audit log for status change + if old_status and new_status and old_status != new_status: + AuditLogRepository.create( + AuditLogModel( + task_id=current_task.id, + action="status_changed", + status_from=old_status, + status_to=new_status, + performed_by=PyObjectId(user_id), + ) + ) + + if not updated_task: + raise TaskNotFoundException(task_id) + + return cls.prepare_task_dto(updated_task, user_id) + + @classmethod + def update_task_with_assignee_from_dict(cls, task_id: str, validated_data: dict, user_id: str) -> TaskDTO: + """ + Update both task details and assignee information in a single operation using validated data dict. + This allows for true partial updates without requiring all fields. + """ + current_task = TaskRepository.get_by_id(task_id) + + if not current_task: + raise TaskNotFoundException(task_id) + + # Check if user is the creator + if current_task.createdBy != user_id: + # Check if user is assigned to this task + assigned_task_ids = TaskRepository._get_assigned_task_ids_for_user(user_id) + if current_task.id not in assigned_task_ids: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + + # Validate assignee if provided + if validated_data.get("assignee"): + assignee_info = validated_data["assignee"] + assignee_id = assignee_info.get("assignee_id") + user_type = assignee_info.get("user_type") + + if user_type == "user": + user_data = UserRepository.get_by_id(assignee_id) + if not user_data: + raise UserNotFoundException(assignee_id) + elif user_type == "team": + team_data = TeamRepository.get_by_id(assignee_id) + if not team_data: + raise ValueError(f"Team not found: {assignee_id}") + + # Prepare update payload for task fields + update_payload = {} + enum_fields = {"priority": TaskPriority, "status": TaskStatus} + + # Process task fields from validated_data + for field, value in validated_data.items(): + if field == "assignee": + continue # Handle assignee separately + + # Skip if the value is the same as current task + current_value = getattr(current_task, field, None) + if current_value == value: + continue + + if field == "labels": + update_payload[field] = cls._process_labels_for_update(value) + elif field in enum_fields: + # For enums, we need to get the name if it's an enum instance, or process as string + if hasattr(value, "name"): + update_payload[field] = value.value + else: + update_payload[field] = cls._process_enum_for_update(enum_fields[field], value) + elif field in cls.DIRECT_ASSIGNMENT_FIELDS: + update_payload[field] = value + + # Handle startedAt logic + if validated_data.get("status") == TaskStatus.IN_PROGRESS and not current_task.startedAt: + update_payload["startedAt"] = datetime.now(timezone.utc) + + if ( + validated_data.get("status") is not None + and validated_data.get("status") != TaskStatus.DEFERRED.value + and current_task.deferredDetails + ): + update_payload["deferredDetails"] = None + + if validated_data.get("status") == TaskStatus.DEFERRED.value: + update_payload["status"] = current_task.status + + # Update task if there are changes + if update_payload: + update_payload["updatedBy"] = user_id + updated_task = TaskRepository.update(task_id, update_payload) + if not updated_task: + raise TaskNotFoundException(task_id) + else: + updated_task = current_task + + # Handle assignee updates + if validated_data.get("assignee"): + TaskAssignmentRepository.update_assignment( + task_id, + validated_data["assignee"]["assignee_id"], + validated_data["assignee"]["user_type"], + user_id, + ) + + return cls.prepare_task_dto(updated_task, user_id) + + @classmethod + def update_task_with_assignee(cls, task_id: str, dto: CreateTaskDTO, user_id: str) -> TaskDTO: + """ + Update both task details and assignee information in a single operation. + Similar to create_task but for updates. + """ + current_task = TaskRepository.get_by_id(task_id) + + if not current_task: + raise TaskNotFoundException(task_id) + + # Check if user is the creator + if current_task.createdBy != user_id: + # Check if user is assigned to this task + assigned_task_ids = TaskRepository._get_assigned_task_ids_for_user(user_id) + if current_task.id not in assigned_task_ids: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + + # Validate assignee if provided + if dto.assignee: + assignee_id = dto.assignee.get("assignee_id") + user_type = dto.assignee.get("user_type") + + if user_type == "user": + user_data = UserRepository.get_by_id(assignee_id) + if not user_data: + raise UserNotFoundException(assignee_id) + elif user_type == "team": + team_data = TeamRepository.get_by_id(assignee_id) + if not team_data: + raise ValueError(f"Team not found: {assignee_id}") + + # Prepare update payload for task fields + update_payload = {} + enum_fields = {"priority": TaskPriority, "status": TaskStatus} + + # Process task fields from DTO + dto_data = dto.model_dump(exclude_none=True, exclude={"assignee", "createdBy"}) + + for field, value in dto_data.items(): + # Skip if the value is the same as current task + current_value = getattr(current_task, field, None) + if current_value == value: + continue + + if field == "labels": + update_payload[field] = cls._process_labels_for_update(value) + elif field in enum_fields: + # For enums, we need to get the name if it's an enum instance, or process as string + if hasattr(value, "name"): + update_payload[field] = value.value + else: + update_payload[field] = cls._process_enum_for_update(enum_fields[field], value) + elif field in cls.DIRECT_ASSIGNMENT_FIELDS: + update_payload[field] = value + + # Handle startedAt logic + if dto.status == TaskStatus.IN_PROGRESS and not current_task.startedAt: + update_payload["startedAt"] = datetime.now(timezone.utc) + + # Update task if there are changes + if update_payload: + update_payload["updatedBy"] = user_id + updated_task = TaskRepository.update(task_id, update_payload) + if not updated_task: + raise TaskNotFoundException(task_id) + else: + updated_task = current_task + + # Handle assignee updates + if dto.assignee: + TaskAssignmentRepository.update_assignment( + task_id, + dto.assignee["assignee_id"], + dto.assignee["user_type"], + user_id, + ) + + return cls.prepare_task_dto(updated_task, user_id) + + @classmethod + def defer_task(cls, task_id: str, deferred_till: datetime, user_id: str) -> TaskDTO: + current_task = TaskRepository.get_by_id(task_id) + + if not current_task: + raise TaskNotFoundException(task_id) + + # Check if user is the creator + if current_task.createdBy != user_id: + # Check if user is assigned to this task + assigned_task_ids = TaskRepository._get_assigned_task_ids_for_user(user_id) + if current_task.id not in assigned_task_ids: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + + if current_task.status == TaskStatus.DONE: + raise TaskStateConflictException(ValidationErrors.CANNOT_DEFER_A_DONE_TASK) + + if deferred_till.tzinfo is None: + deferred_till = deferred_till.replace(tzinfo=timezone.utc) + + if current_task.dueAt: + due_at = ( + current_task.dueAt.replace(tzinfo=timezone.utc) + if current_task.dueAt.tzinfo is None + else current_task.dueAt.astimezone(timezone.utc) + ) + + if deferred_till >= due_at: + raise UnprocessableEntityException( + ValidationErrors.CANNOT_DEFER_TOO_CLOSE_TO_DUE_DATE, + source={ApiErrorSource.PARAMETER: "deferredTill"}, + ) + + deferred_details = DeferredDetailsModel( + deferredAt=datetime.now(timezone.utc), + deferredTill=deferred_till, + deferredBy=user_id, + ) + + update_payload = { + "status": TaskStatus.TODO.value, + "deferredDetails": deferred_details.model_dump(), + "updatedBy": user_id, + } + + updated_task = TaskRepository.update(task_id, update_payload) + if not updated_task: + raise TaskNotFoundException(task_id) + + return cls.prepare_task_dto(updated_task, user_id) @classmethod def create_task(cls, dto: CreateTaskDTO) -> CreateTaskResponse: now = datetime.now(timezone.utc) started_at = now if dto.status == TaskStatus.IN_PROGRESS else None - if dto.labels: - existing_labels = LabelRepository.list_by_ids(dto.labels) - if len(existing_labels) != len(dto.labels): - found_ids = [str(label.id) for label in existing_labels] - missing_ids = [label_id for label_id in dto.labels if label_id not in found_ids] - - raise ValueError( - ApiErrorResponse( - statusCode=400, - message=ApiErrors.INVALID_LABELS, - errors=[ - ApiErrorDetail( - source={ApiErrorSource.PARAMETER: "labels"}, - title=ApiErrors.INVALID_LABEL_IDS, - detail=ValidationErrors.MISSING_LABEL_IDS.format(", ".join(missing_ids)), - ) - ], - ) - ) + # Validate assignee + if dto.assignee: + assignee_id = dto.assignee.get("assignee_id") + user_type = dto.assignee.get("user_type") + + if user_type == "user": + user = UserRepository.get_by_id(assignee_id) + if not user: + raise UserNotFoundException(assignee_id) + elif user_type == "team": + team = TeamRepository.get_by_id(assignee_id) + if not team: + raise ValueError(f"Team not found: {assignee_id}") + + # Removed label existence check task = TaskModel( + id=None, title=dto.title, description=dto.description, priority=dto.priority, status=dto.status, - assignee=dto.assignee, labels=dto.labels, dueAt=dto.dueAt, startedAt=started_at, createdAt=now, isAcknowledged=False, isDeleted=False, - createdBy="system", # placeholder, will be user_id when auth is in place + createdBy=dto.createdBy, # placeholder, will be user_id when auth is in place ) try: created_task = TaskRepository.create(task) - task_dto = cls.prepare_task_dto(created_task) + + # Create assignee relationship if assignee is provided + team_id = None + if dto.assignee and dto.assignee.get("user_type") == "team": + team_id = dto.assignee.get("assignee_id") + + if dto.assignee: + assignee_dto = CreateTaskAssignmentDTO( + task_id=str(created_task.id), + assignee_id=dto.assignee.get("assignee_id"), + user_type=dto.assignee.get("user_type"), + team_id=team_id, + ) + TaskAssignmentService.create_task_assignment(assignee_dto, created_task.createdBy) + + task_dto = cls.prepare_task_dto(created_task, dto.createdBy) return CreateTaskResponse(data=task_dto) except ValueError as e: if isinstance(e.args[0], ApiErrorResponse): @@ -202,7 +656,7 @@ def create_task(cls, dto: CreateTaskDTO) -> CreateTaskResponse: ApiErrorDetail( source={ApiErrorSource.PARAMETER: "task_repository"}, title=ApiErrors.UNEXPECTED_ERROR, - detail=str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + detail=(str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR), ) ], ) @@ -216,8 +670,31 @@ def create_task(cls, dto: CreateTaskDTO) -> CreateTaskResponse: ApiErrorDetail( source={ApiErrorSource.PARAMETER: "server"}, title=ApiErrors.UNEXPECTED_ERROR, - detail=str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + detail=(str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR), ) ], ) ) + + @classmethod + def delete_task(cls, task_id: str, user_id: str) -> None: + deleted_task_model = TaskRepository.delete_by_id(task_id, user_id) + if deleted_task_model is None: + raise TaskNotFoundException(task_id) + return None + + @classmethod + def get_tasks_for_user( + cls, + user_id: str, + page: int = PaginationConfig.DEFAULT_PAGE, + limit: int = PaginationConfig.DEFAULT_LIMIT, + status_filter: str = None, + ) -> GetTasksResponse: + cls._validate_pagination_params(page, limit) + tasks = TaskRepository.get_tasks_for_user(user_id, page, limit, status_filter=status_filter) + if not tasks: + return GetTasksResponse(tasks=[], links=None) + + task_dtos = [cls.prepare_task_dto(task, user_id) for task in tasks] + return GetTasksResponse(tasks=task_dtos, links=None) diff --git a/todo/services/team_creation_invite_code_service.py b/todo/services/team_creation_invite_code_service.py new file mode 100644 index 00000000..1b9aca52 --- /dev/null +++ b/todo/services/team_creation_invite_code_service.py @@ -0,0 +1,76 @@ +from todo.repositories.team_creation_invite_code_repository import TeamCreationInviteCodeRepository +from todo.repositories.audit_log_repository import AuditLogRepository +from todo.models.team_creation_invite_code import TeamCreationInviteCodeModel +from todo.models.audit_log import AuditLogModel +from todo.models.common.pyobjectid import PyObjectId +from todo.dto.team_creation_invite_code_dto import GenerateTeamCreationInviteCodeDTO +from todo.dto.responses.generate_team_creation_invite_code_response import GenerateTeamCreationInviteCodeResponse +from todo.dto.responses.get_team_creation_invite_codes_response import ( + GetTeamCreationInviteCodesResponse, + TeamCreationInviteCodeListItemDTO, +) +from todo.utils.invite_code_utils import generate_invite_code + + +class TeamCreationInviteCodeService: + """Service for team creation invite code operations.""" + + @classmethod + def generate_code( + cls, dto: GenerateTeamCreationInviteCodeDTO, created_by: str + ) -> GenerateTeamCreationInviteCodeResponse: + """Generate a new team creation invite code.""" + code = generate_invite_code("team creation invite code") + + team_invite_code = TeamCreationInviteCodeModel(code=code, description=dto.description, created_by=created_by) + + saved_code = TeamCreationInviteCodeRepository.create(team_invite_code) + + AuditLogRepository.create( + AuditLogModel( + action="team_creation_invite_code_generated", + performed_by=PyObjectId(created_by), + ) + ) + + return GenerateTeamCreationInviteCodeResponse( + code=saved_code.code, + description=saved_code.description, + message="Team creation invite code generated successfully", + ) + + @classmethod + def get_all_codes(cls, page: int = 1, limit: int = 10, base_url: str = "") -> GetTeamCreationInviteCodesResponse: + """Get paginated team creation invite codes with user details.""" + try: + codes_data, total_count = TeamCreationInviteCodeRepository.get_all_codes(page, limit) + + codes = [] + for code_data in codes_data: + code_dto = TeamCreationInviteCodeListItemDTO( + id=code_data["id"], + code=code_data["code"], + description=code_data.get("description"), + created_by=code_data["created_by"], + created_at=code_data["created_at"], + used_at=code_data.get("used_at"), + used_by=code_data.get("used_by"), + is_used=code_data["is_used"], + ) + codes.append(code_dto) + + total_pages = (total_count + limit - 1) // limit + has_next = page < total_pages + has_previous = page > 1 + + previous_url = f"{base_url}?page={page-1}&limit={limit}" if has_previous else None + next_url = f"{base_url}?page={page+1}&limit={limit}" if has_next else None + + return GetTeamCreationInviteCodesResponse( + codes=codes, + previous_url=previous_url, + next_url=next_url, + message="Team creation invite codes retrieved successfully", + ) + except Exception as e: + raise ValueError(f"Failed to get team creation invite codes: {str(e)}") diff --git a/todo/services/team_service.py b/todo/services/team_service.py new file mode 100644 index 00000000..134ece79 --- /dev/null +++ b/todo/services/team_service.py @@ -0,0 +1,496 @@ +from todo.dto.team_dto import CreateTeamDTO, TeamDTO +from todo.dto.update_team_dto import UpdateTeamDTO +from todo.dto.responses.create_team_response import CreateTeamResponse +from todo.dto.responses.get_user_teams_response import GetUserTeamsResponse +from todo.models.team import TeamModel, UserTeamDetailsModel +from todo.models.common.pyobjectid import PyObjectId +from todo.repositories.team_creation_invite_code_repository import TeamCreationInviteCodeRepository +from todo.repositories.team_repository import TeamRepository, UserTeamDetailsRepository +from todo.constants.messages import AppMessages +from todo.utils.invite_code_utils import generate_invite_code +from typing import List +from todo.models.audit_log import AuditLogModel +from todo.repositories.audit_log_repository import AuditLogRepository +from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail + +DEFAULT_ROLE_ID = "1" + + +class TeamService: + @classmethod + def create_team(cls, dto: CreateTeamDTO, created_by_user_id: str) -> CreateTeamResponse: + """ + Create a new team with members and POC. + + Args: + dto: Team creation data including name, description, POC, members, and team invite code + created_by_user_id: ID of the user creating the team + + Returns: + CreateTeamResponse with the created team details and success message + + Raises: + ValueError: If team creation fails or invite code is invalid + """ + try: + # Member IDs and POC ID validation is handled at DTO level + + code_data = TeamCreationInviteCodeRepository.validate_and_consume_code( + dto.team_invite_code, created_by_user_id + ) + if not code_data: + raise ValueError( + ApiErrorResponse( + statusCode=400, + message="Invalid or already used team creation code. Please enter a valid code.", + errors=[ApiErrorDetail(detail="Invalid team creation code")], + ) + ) + + member_ids = dto.member_ids or [] + + # Generate invite code + invite_code = generate_invite_code(dto.name) + + # Create team + team = TeamModel( + name=dto.name, + description=dto.description if dto.description else None, + poc_id=PyObjectId(dto.poc_id) if dto.poc_id else None, + invite_code=invite_code, + created_by=PyObjectId(created_by_user_id), + updated_by=PyObjectId(created_by_user_id), + ) + + created_team = TeamRepository.create(team) + + # Create user-team relationships + user_teams = [] + + # Add members to the team + for member_id in member_ids: + user_team = UserTeamDetailsModel( + user_id=PyObjectId(member_id), + team_id=created_team.id, + role_id=DEFAULT_ROLE_ID, + is_active=True, + created_by=PyObjectId(created_by_user_id), + updated_by=PyObjectId(created_by_user_id), + ) + user_teams.append(user_team) + + # Add POC to the team if specified and not already in members + if dto.poc_id and dto.poc_id not in member_ids: + user_team = UserTeamDetailsModel( + user_id=PyObjectId(dto.poc_id), + team_id=created_team.id, + role_id=DEFAULT_ROLE_ID, + is_active=True, + created_by=PyObjectId(created_by_user_id), + updated_by=PyObjectId(created_by_user_id), + ) + user_teams.append(user_team) + + # Always add the creator as a member if not already in member_ids or as POC + if created_by_user_id not in member_ids and created_by_user_id != dto.poc_id: + user_team = UserTeamDetailsModel( + user_id=PyObjectId(created_by_user_id), + team_id=created_team.id, + role_id=DEFAULT_ROLE_ID, + is_active=True, + created_by=PyObjectId(created_by_user_id), + updated_by=PyObjectId(created_by_user_id), + ) + user_teams.append(user_team) + + if user_teams: + UserTeamDetailsRepository.create_many(user_teams) + + team_id_str = str(created_team.id) + + cls._assign_user_role(created_by_user_id, team_id_str, "owner") + + for member_id in member_ids: + if member_id != created_by_user_id: + cls._assign_user_role(member_id, team_id_str, "member") + + if dto.poc_id and dto.poc_id != created_by_user_id: + cls._assign_user_role(dto.poc_id, team_id_str, "owner") + + # Audit log for team creation + AuditLogRepository.create( + AuditLogModel( + team_id=created_team.id, + action="team_created", + performed_by=PyObjectId(created_by_user_id), + ) + ) + + # Convert to DTO + team_dto = TeamDTO( + id=str(created_team.id), + name=created_team.name, + description=created_team.description, + poc_id=str(created_team.poc_id) if created_team.poc_id else None, + invite_code=created_team.invite_code, + created_by=str(created_team.created_by), + updated_by=str(created_team.updated_by), + created_at=created_team.created_at, + updated_at=created_team.updated_at, + ) + + AuditLogRepository.create( + AuditLogModel( + action="team_creation_invite_code_consumed", + performed_by=PyObjectId(created_by_user_id), + team_id=created_team.id, + ) + ) + return CreateTeamResponse( + team=team_dto, + message=AppMessages.TEAM_CREATED, + ) + + except Exception as e: + raise ValueError(f"Failed to create team: {str(e)}") + + @classmethod + def _assign_user_role(cls, user_id: str, team_id: str, role_name: str): + """Helper method to assign user roles using the new role system.""" + try: + from todo.services.user_role_service import UserRoleService + + UserRoleService.assign_role(user_id, role_name, "TEAM", team_id) + except Exception: + # Don't fail team creation if role assignment fails + import logging + + logger = logging.getLogger(__name__) + logger.warning(f"Failed to assign role {role_name} to user {user_id} in team {team_id}") + + @classmethod + def get_user_teams(cls, user_id: str) -> GetUserTeamsResponse: + """ + Get all teams assigned to a specific user. + + Args: + user_id: ID of the user to get teams for + + Returns: + GetUserTeamsResponse with the list of teams and total count + + Raises: + ValueError: If getting user teams fails + """ + try: + # Get user-team relationships + user_team_details = UserTeamDetailsRepository.get_by_user_id(user_id) + + if not user_team_details: + return GetUserTeamsResponse(teams=[], total=0) + + # Get team details for each relationship + teams = [] + for user_team in user_team_details: + team = TeamRepository.get_by_id(str(user_team.team_id)) + if team: + team_dto = TeamDTO( + id=str(team.id), + name=team.name, + description=team.description, + poc_id=str(team.poc_id) if team.poc_id else None, + invite_code=team.invite_code, + created_by=str(team.created_by), + updated_by=str(team.updated_by), + created_at=team.created_at, + updated_at=team.updated_at, + ) + teams.append(team_dto) + + return GetUserTeamsResponse(teams=teams, total=len(teams)) + + except Exception as e: + raise ValueError(f"Failed to get user teams: {str(e)}") + + @classmethod + def get_team_by_id(cls, team_id: str) -> TeamDTO: + """ + Get a team by its ID. + + Args: + team_id: ID of the team to retrieve + + Returns: + TeamDTO with the team details + + Raises: + ValueError: If the team is not found + """ + team = TeamRepository.get_by_id(team_id) + if not team: + raise ValueError(f"Team with id {team_id} not found") + return TeamDTO( + id=str(team.id), + name=team.name, + description=team.description, + poc_id=str(team.poc_id) if team.poc_id else None, + invite_code=team.invite_code, + created_by=str(team.created_by), + updated_by=str(team.updated_by), + created_at=team.created_at, + updated_at=team.updated_at, + ) + + @classmethod + def join_team_by_invite_code(cls, invite_code: str, user_id: str) -> TeamDTO: + """ + Join a team using an invite code. + + Args: + invite_code: The invite code for the team + user_id: The user who wants to join + + Returns: + TeamDTO with the team details + + Raises: + ValueError: If invite code is invalid, team not found, or user already a member + """ + # 1. Find the team by invite code + team = TeamRepository.get_by_invite_code(invite_code) + if not team: + raise ValueError("Invalid invite code or team does not exist.") + + # 2. Check if user is already a member + from todo.repositories.team_repository import UserTeamDetailsRepository + + user_teams = UserTeamDetailsRepository.get_by_user_id(user_id) + for user_team in user_teams: + if str(user_team.team_id) == str(team.id) and user_team.is_active: + raise ValueError("User is already a member of this team.") + + # 3. Add user to the team (ORIGINAL SYSTEM) + from todo.models.common.pyobjectid import PyObjectId + from todo.models.team import UserTeamDetailsModel + + user_team = UserTeamDetailsModel( + user_id=PyObjectId(user_id), + team_id=team.id, + role_id=DEFAULT_ROLE_ID, + is_active=True, + created_by=PyObjectId(user_id), + updated_by=PyObjectId(user_id), + ) + UserTeamDetailsRepository.create(user_team) + + # NEW: Assign default member role using new role system + cls._assign_user_role(user_id, str(team.id), "member") + + # Audit log for team join + AuditLogRepository.create( + AuditLogModel( + team_id=team.id, + action="member_joined_team", + performed_by=PyObjectId(user_id), + ) + ) + + # 4. Return team details + return TeamDTO( + id=str(team.id), + name=team.name, + description=team.description, + poc_id=str(team.poc_id) if team.poc_id else None, + invite_code=team.invite_code, + created_by=str(team.created_by), + updated_by=str(team.updated_by), + created_at=team.created_at, + updated_at=team.updated_at, + ) + + @classmethod + def update_team(cls, team_id: str, dto: UpdateTeamDTO, updated_by_user_id: str) -> TeamDTO: + """ + Update a team by its ID. + + Args: + team_id: ID of the team to update + dto: Team update data including name, description, and POC + updated_by_user_id: ID of the user updating the team + + Returns: + TeamDTO with the updated team details + + Raises: + ValueError: If team update fails or team not found + """ + try: + # Check if team exists + existing_team = TeamRepository.get_by_id(team_id) + if not existing_team: + raise ValueError(f"Team with id {team_id} not found") + + # Prepare update data + update_data = {} + if dto.name is not None: + update_data["name"] = dto.name + if dto.description is not None: + update_data["description"] = dto.description + if dto.poc_id is not None: + update_data["poc_id"] = PyObjectId(dto.poc_id) if dto.poc_id and dto.poc_id.strip() else None + + # Update the team + updated_team = TeamRepository.update(team_id, update_data, updated_by_user_id) + if not updated_team: + raise ValueError(f"Failed to update team with id {team_id}") + + # Handle member updates if provided + if dto.member_ids is not None: + from todo.repositories.team_repository import UserTeamDetailsRepository + + success = UserTeamDetailsRepository.update_team_members(team_id, dto.member_ids, updated_by_user_id) + if not success: + raise ValueError(f"Failed to update team members for team with id {team_id}") + + # Audit log for team update + AuditLogRepository.create( + AuditLogModel( + team_id=PyObjectId(team_id), + action="team_updated", + performed_by=PyObjectId(updated_by_user_id), + ) + ) + + # Convert to DTO + return TeamDTO( + id=str(updated_team.id), + name=updated_team.name, + description=updated_team.description, + poc_id=str(updated_team.poc_id) if updated_team.poc_id else None, + invite_code=updated_team.invite_code, + created_by=str(updated_team.created_by), + updated_by=str(updated_team.updated_by), + created_at=updated_team.created_at, + updated_at=updated_team.updated_at, + ) + + except Exception as e: + raise ValueError(f"Failed to update team: {str(e)}") + + @classmethod + def add_team_members(cls, team_id: str, member_ids: List[str], added_by_user_id: str) -> TeamDTO: + """ + Add members to a team. Only existing team members can add new members. + + Args: + team_id: ID of the team to add members to + member_ids: List of user IDs to add to the team + added_by_user_id: ID of the user adding the members + + Returns: + TeamDTO with the updated team details + + Raises: + ValueError: If user is not a team member, team not found, or operation fails + """ + try: + # Check if team exists + team = TeamRepository.get_by_id(team_id) + if not team: + raise ValueError(f"Team with id {team_id} not found") + + # Check if the user adding members is already a team member + from todo.repositories.team_repository import UserTeamDetailsRepository + + user_teams = UserTeamDetailsRepository.get_by_user_id(added_by_user_id) + user_is_member = any(str(user_team.team_id) == team_id and user_team.is_active for user_team in user_teams) + + if not user_is_member: + raise ValueError("You must be a member of the team to add other members") + + # Validate that all users exist + from todo.repositories.user_repository import UserRepository + + for member_id in member_ids: + user = UserRepository.get_by_id(member_id) + if not user: + raise ValueError(f"User with id {member_id} not found") + + # Check if any users are already team members + existing_members = UserTeamDetailsRepository.get_users_by_team_id(team_id) + already_members = [member_id for member_id in member_ids if member_id in existing_members] + + if already_members: + raise ValueError(f"Users {', '.join(already_members)} are already team members") + + # Add new members to the team (ORIGINAL SYSTEM) + from todo.models.team import UserTeamDetailsModel + from todo.models.common.pyobjectid import PyObjectId + + new_user_teams = [] + for member_id in member_ids: + user_team = UserTeamDetailsModel( + user_id=PyObjectId(member_id), + team_id=team.id, + role_id=DEFAULT_ROLE_ID, + is_active=True, + created_by=PyObjectId(added_by_user_id), + updated_by=PyObjectId(added_by_user_id), + ) + new_user_teams.append(user_team) + + if new_user_teams: + UserTeamDetailsRepository.create_many(new_user_teams) + + # NEW: Assign default member roles using new role system + for member_id in member_ids: + cls._assign_user_role(member_id, team_id, "member") + + # Audit log for team member addition + for member_id in member_ids: + AuditLogRepository.create( + AuditLogModel( + team_id=team.id, + action="member_added_to_team", + performed_by=PyObjectId(added_by_user_id), + details={"added_member_id": member_id}, + ) + ) + + # Return updated team details + return TeamDTO( + id=str(team.id), + name=team.name, + description=team.description, + poc_id=str(team.poc_id) if team.poc_id else None, + invite_code=team.invite_code, + created_by=str(team.created_by), + updated_by=str(team.updated_by), + created_at=team.created_at, + updated_at=team.updated_at, + ) + + except Exception as e: + raise ValueError(f"Failed to add team members: {str(e)}") + + class TeamOrUserNotFound(Exception): + pass + + @classmethod + def remove_member_from_team(cls, user_id: str, team_id: str, removed_by_user_id: str = None): + from todo.repositories.user_team_details_repository import UserTeamDetailsRepository + + success = UserTeamDetailsRepository.remove_member_from_team(user_id=user_id, team_id=team_id) + if not success: + raise cls.TeamOrUserNotFound() + + # Audit log for team member removal + AuditLogRepository.create( + AuditLogModel( + team_id=PyObjectId(team_id), + action="member_removed_from_team", + performed_by=PyObjectId(removed_by_user_id) if removed_by_user_id else PyObjectId(user_id), + ) + ) + + return True diff --git a/todo/services/user_role_service.py b/todo/services/user_role_service.py new file mode 100644 index 00000000..5cb4bc97 --- /dev/null +++ b/todo/services/user_role_service.py @@ -0,0 +1,131 @@ +from typing import List, Dict, Any, Optional +import logging + +from todo.repositories.user_role_repository import UserRoleRepository +from todo.constants.role import DEFAULT_TEAM_ROLE, VALID_ROLE_NAMES_BY_SCOPE, RoleScope, RoleName + +logger = logging.getLogger(__name__) + + +class UserRoleService: + @classmethod + def _validate_role(cls, role_name: str, scope: str) -> bool: + """Validate if role_name is allowed for the given scope.""" + valid_roles = VALID_ROLE_NAMES_BY_SCOPE.get(scope, []) + return role_name in valid_roles + + @classmethod + def assign_role(cls, user_id: str, role_name: str, scope: str, team_id: Optional[str] = None) -> bool: + try: + if not user_id or not user_id.strip(): + logger.error("user_id is required") + return False + + if not cls._validate_role(role_name, scope): + logger.error(f"Invalid role '{role_name}' for scope '{scope}'") + return False + + if scope == "TEAM" and not team_id: + logger.error("team_id is required for TEAM scope roles") + return False + + if scope == "GLOBAL" and team_id: + logger.error("team_id should not be provided for GLOBAL scope roles") + return False + + role_enum = RoleName(role_name) + scope_enum = RoleScope(scope) + + UserRoleRepository.assign_role(user_id, role_enum, scope_enum, team_id) + return True + except Exception as e: + logger.error(f"Failed to assign role: {str(e)}") + return False + + @classmethod + def remove_role_by_id(cls, user_id: str, role_id: str, scope: str, team_id: Optional[str] = None) -> bool: + try: + return UserRoleRepository.remove_role_by_id(user_id, role_id, scope, team_id) + except Exception as e: + logger.error(f"Failed to remove role: {str(e)}") + return False + + @classmethod + def get_user_roles( + cls, user_id: Optional[str] = None, scope: Optional[str] = None, team_id: Optional[str] = None + ) -> List[Dict[str, Any]]: + try: + scope_enum = RoleScope(scope) if scope else None + + user_roles = UserRoleRepository.get_user_roles(user_id, scope_enum, team_id) + + result = [] + for role in user_roles: + role_name_value = role.role_name.value if hasattr(role.role_name, "value") else role.role_name + scope_value = role.scope.value if hasattr(role.scope, "value") else role.scope + + role_dict = { + "role_id": str(role.id), + "role_name": role_name_value, + "scope": scope_value, + "team_id": role.team_id, + "assigned_at": role.created_at, + } + result.append(role_dict) + + return result + except Exception as e: + logger.error(f"Failed to get user roles: {str(e)}") + return [] + + @classmethod + def has_role(cls, user_id: str, role_name: str, scope: str, team_id: Optional[str] = None) -> bool: + try: + user_roles = cls.get_user_roles(user_id, scope, team_id) + return any(role["role_name"] == role_name for role in user_roles) + except Exception: + return False + + @classmethod + def assign_default_team_role(cls, user_id: str, team_id: str) -> bool: + return cls.assign_role(user_id, DEFAULT_TEAM_ROLE, "TEAM", team_id) + + @classmethod + def assign_team_owner(cls, user_id: str, team_id: str) -> bool: + return cls.assign_role(user_id, RoleName.OWNER.value, "TEAM", team_id) + + @classmethod + def get_valid_roles_for_scope(cls, scope: str) -> List[str]: + """Get all valid role names for a given scope.""" + return VALID_ROLE_NAMES_BY_SCOPE.get(scope, []) + + @classmethod + def get_team_users_with_roles(cls, team_id: str) -> List[Dict[str, Any]]: + """Get all users in a team with their roles.""" + try: + from todo.repositories.user_repository import UserRepository + + user_roles = UserRoleRepository.get_user_roles(user_id=None, scope=RoleScope.TEAM, team_id=team_id) + + users_roles_map = {} + for role in user_roles: + user_id = role.user_id + role_data = { + "role_id": str(role.id), + "role_name": role.role_name.value if hasattr(role.role_name, "value") else role.role_name, + } + + if user_id not in users_roles_map: + users_roles_map[user_id] = [] + users_roles_map[user_id].append(role_data) + + team_users = [] + for user_id, roles in users_roles_map.items(): + user = UserRepository.get_by_id(user_id) + if user: + team_users.append({"user_id": user_id, "user_name": user.name, "roles": roles}) + + return team_users + except Exception as e: + logger.error(f"Failed to get team users with roles: {str(e)}") + return [] diff --git a/todo/services/user_service.py b/todo/services/user_service.py new file mode 100644 index 00000000..81b57c62 --- /dev/null +++ b/todo/services/user_service.py @@ -0,0 +1,130 @@ +from todo.models.user import UserModel +from todo.repositories.user_repository import UserRepository +from todo.constants.messages import ValidationErrors, RepositoryErrors +from todo.exceptions.auth_exceptions import ( + UserNotFoundException, + APIException, +) +from rest_framework.exceptions import ValidationError as DRFValidationError +from typing import List, Tuple +from todo.dto.user_dto import UserDTO, UsersDTO +from todo.repositories.task_assignment_repository import TaskAssignmentRepository + + +class UserService: + @classmethod + def create_or_update_user(cls, google_user_data: dict) -> UserModel: + try: + cls._validate_google_user_data(google_user_data) + return UserRepository.create_or_update(google_user_data) + except (UserNotFoundException, APIException, DRFValidationError): + raise + except Exception as e: + raise APIException(RepositoryErrors.USER_CREATE_UPDATE_FAILED.format(str(e))) from e + + @classmethod + def get_user_by_id(cls, user_id: str) -> UserModel: + user = UserRepository.get_by_id(user_id) + if not user: + raise UserNotFoundException() + return user + + @classmethod + def search_users(cls, query: str, page: int = 1, limit: int = 10) -> Tuple[List[UserModel], int]: + """ + Search users by name or email using fuzzy search + """ + cls._validate_search_params(query, page, limit) + return UserRepository.search_users(query, page, limit) + + @classmethod + def get_users_by_ids(cls, user_ids: list[str]) -> list[UserDTO]: + users = [] + for user_id in user_ids: + user = UserRepository.get_by_id(user_id) + if user: + users.append( + UserDTO( + id=str(user.id), + name=user.name, + email_id=user.email_id, + created_at=user.created_at, + updated_at=user.updated_at, + ) + ) + return users + + @classmethod + def get_users_by_team_id(cls, team_id: str) -> list[UserDTO]: + from todo.repositories.team_repository import UserTeamDetailsRepository + + users_and_added_on = UserTeamDetailsRepository.get_users_and_added_on_by_team_id(team_id) + user_ids = [entry["user_id"] for entry in users_and_added_on] + added_on_map = {entry["user_id"]: entry["added_on"] for entry in users_and_added_on} + users = cls.get_users_by_ids(user_ids) + # Attach addedOn to each user dto + for user in users: + user.addedOn = added_on_map.get(user.id) + # Compute tasksAssignedCount: tasks assigned to both user and team + user_task_ids = set( + [str(assignment.task_id) for assignment in TaskAssignmentRepository.get_by_assignee_id(user.id, "user")] + ) + team_task_ids = set( + [str(assignment.task_id) for assignment in TaskAssignmentRepository.get_by_assignee_id(team_id, "team")] + ) + user.tasksAssignedCount = len(user_task_ids & team_task_ids) + return users + + @classmethod + def _validate_google_user_data(cls, google_user_data: dict) -> None: + validation_errors = {} + + if not google_user_data.get("google_id"): + validation_errors["google_id"] = ValidationErrors.MISSING_GOOGLE_ID + + if not google_user_data.get("email"): + validation_errors["email"] = ValidationErrors.MISSING_EMAIL + + if not google_user_data.get("name"): + validation_errors["name"] = ValidationErrors.MISSING_NAME + + if not google_user_data.get("picture"): + validation_errors["picture"] = ValidationErrors.MISSING_PICTURE + + if validation_errors: + raise DRFValidationError(validation_errors) + + @classmethod + def _validate_search_params(cls, query: str, page: int, limit: int) -> None: + validation_errors = {} + + if not query or not query.strip(): + validation_errors["query"] = "Search query cannot be empty" + + if page < 1: + validation_errors["page"] = ValidationErrors.PAGE_POSITIVE + + if limit < 1: + validation_errors["limit"] = ValidationErrors.LIMIT_POSITIVE + + if limit > 100: + validation_errors["limit"] = ValidationErrors.MAX_LIMIT_EXCEEDED.format(100) + + if validation_errors: + raise DRFValidationError(validation_errors) + + @classmethod + def get_all_users(cls, page: int = 1, limit: int = 10) -> tuple[List[UsersDTO], int]: + """ + Get all users with pagination + """ + users, total_count = UserRepository.get_all_users(page, limit) + user_dtos = [ + UsersDTO( + id=str(user.id), + name=user.name, + ) + for user in users + ] + + return user_dtos, total_count diff --git a/todo/services/watchlist_service.py b/todo/services/watchlist_service.py new file mode 100644 index 00000000..683972bb --- /dev/null +++ b/todo/services/watchlist_service.py @@ -0,0 +1,208 @@ +from datetime import datetime, timezone +from django.conf import settings +from django.urls import reverse_lazy +from urllib.parse import urlencode +import math + +from todo.constants.task import TaskStatus +from todo.dto.label_dto import LabelDTO +from todo.dto.responses.paginated_response import LinksData +from todo.dto.watchlist_dto import CreateWatchlistDTO, UpdateWatchlistDTO, WatchlistDTO +from todo.dto.responses.create_watchlist_response import CreateWatchlistResponse +from todo.dto.responses.get_watchlist_task_response import GetWatchlistTasksResponse +from todo.exceptions.task_exceptions import TaskNotFoundException +from todo.models.watchlist import WatchlistModel +from todo.repositories.label_repository import LabelRepository +from todo.repositories.watchlist_repository import WatchlistRepository +from todo.constants.messages import ApiErrors +from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail, ApiErrorSource +from todo.utils.task_validation_utils import validate_task_exists +from bson import ObjectId + + +class PaginationConfig: + DEFAULT_PAGE: int = 1 + DEFAULT_LIMIT: int = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["DEFAULT_PAGE_LIMIT"] + MAX_LIMIT: int = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"] + + +class WatchlistService: + @classmethod + def get_watchlisted_tasks( + cls, + page: int, + limit: int, + user_id: str, + ) -> GetWatchlistTasksResponse: + try: + count, tasks = WatchlistRepository.get_watchlisted_tasks(page, limit, user_id) + + if not tasks: + return GetWatchlistTasksResponse(tasks=[], links=None) + + watchlisted_task_dtos = [cls.prepare_watchlisted_task_dto(task) for task in tasks] + + links = cls._build_pagination_links(page, limit, count) + + return GetWatchlistTasksResponse(tasks=watchlisted_task_dtos, links=links) + + except Exception as e: + raise ValueError( + ApiErrorResponse( + statusCode=500, + message=ApiErrors.SERVER_ERROR, + errors=[ + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "server"}, + title=ApiErrors.UNEXPECTED_ERROR, + detail=str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + ) + ], + ) + ) + + @classmethod + def add_task(cls, dto: CreateWatchlistDTO) -> CreateWatchlistResponse: + try: + # Validate that task exists using common function + validate_task_exists(dto.taskId) + + existing = WatchlistRepository.get_by_user_and_task(dto.userId, dto.taskId) + if existing: + raise ValueError( + ApiErrorResponse( + statusCode=400, + message=ApiErrors.TASK_ALREADY_IN_WATCHLIST, + errors=[ + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "taskId"}, + title=ApiErrors.VALIDATION_ERROR, + detail=ApiErrors.TASK_ALREADY_IN_WATCHLIST, + ) + ], + ) + ) + + watchlist_model = WatchlistModel( + taskId=dto.taskId, + userId=dto.userId, + createdBy=dto.createdBy, + createdAt=datetime.now(timezone.utc), + ) + created_watchlist = WatchlistRepository.create(watchlist_model) + watchlist_dto = CreateWatchlistDTO( + taskId=created_watchlist.taskId, + userId=created_watchlist.userId, + createdBy=created_watchlist.createdBy, + createdAt=created_watchlist.createdAt, + ) + return CreateWatchlistResponse(data=watchlist_dto) + + except ValueError as e: + if isinstance(e.args[0], ApiErrorResponse): + raise e + raise ValueError( + ApiErrorResponse( + statusCode=500, + message=ApiErrors.REPOSITORY_ERROR, + errors=[ + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "task_repository"}, + title=ApiErrors.UNEXPECTED_ERROR, + detail=str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + ) + ], + ) + ) + except Exception as e: + raise ValueError( + ApiErrorResponse( + statusCode=500, + message=ApiErrors.SERVER_ERROR, + errors=[ + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "server"}, + title=ApiErrors.UNEXPECTED_ERROR, + detail=str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + ) + ], + ) + ) + + @classmethod + def update_task(cls, taskId: ObjectId, dto: UpdateWatchlistDTO, userId: ObjectId) -> CreateWatchlistResponse: + validate_task_exists(str(taskId)) + + updated_watchlist = WatchlistRepository.update(taskId, dto["isActive"], userId) + if not updated_watchlist: + raise TaskNotFoundException(taskId) + + @classmethod + def _prepare_label_dtos(cls, label_ids: list[str]) -> list[LabelDTO]: + object_ids = [ObjectId(id) for id in label_ids] # Convert here! + label_models = LabelRepository.list_by_ids(object_ids) + + return [ + LabelDTO( + id=str(label_model.id), + name=label_model.name, + color=label_model.color, + ) + for label_model in label_models + ] + + @classmethod + def prepare_watchlisted_task_dto(cls, watchlist_model: WatchlistDTO) -> WatchlistDTO: + labels = cls._prepare_label_dtos(watchlist_model.labels) if watchlist_model.labels else [] + + # Handle assignee data if present + assignee = None + if hasattr(watchlist_model, "assignee") and watchlist_model.assignee: + assignee = watchlist_model.assignee + + task_status = watchlist_model.status + + if watchlist_model.deferredDetails and watchlist_model.deferredDetails.deferredTill > datetime.now( + timezone.utc + ): + task_status = TaskStatus.DEFERRED.value + + return WatchlistDTO( + taskId=str(watchlist_model.taskId), + displayId=watchlist_model.displayId, + title=watchlist_model.title, + description=watchlist_model.description, + isAcknowledged=watchlist_model.isAcknowledged, + isDeleted=watchlist_model.isDeleted, + labels=labels, + dueAt=watchlist_model.dueAt, + deferredDetails=watchlist_model.deferredDetails, + status=task_status, + priority=watchlist_model.priority, + createdAt=watchlist_model.createdAt, + createdBy=watchlist_model.createdBy, + watchlistId=watchlist_model.watchlistId, + assignee=assignee, + ) + + @classmethod + def _build_pagination_links(cls, page: int, limit: int, total_count: int) -> LinksData: + """Build pagination links with sort parameters""" + + total_pages = math.ceil(total_count / limit) + next_link = None + prev_link = None + + if page < total_pages: + next_link = cls.build_page_url(page + 1, limit) + + if page > 1: + prev_link = cls.build_page_url(page - 1, limit) + + return LinksData(next=next_link, prev=prev_link) + + @classmethod + def build_page_url(cls, page: int, limit: int) -> str: + base_url = reverse_lazy("watchlist") + query_params = urlencode({"page": page, "limit": limit}) + return f"{base_url}?{query_params}" diff --git a/todo/tests/fixtures/label.py b/todo/tests/fixtures/label.py index 05de1401..8ad805f7 100644 --- a/todo/tests/fixtures/label.py +++ b/todo/tests/fixtures/label.py @@ -8,14 +8,14 @@ "name": "Label 1", "color": "#fa1e4e", "createdAt": "2024-11-08T10:14:35", - "createdBy": "qMbT6M2GB65W7UHgJS4g", + "createdBy": str(ObjectId()), }, { "_id": ObjectId("67588c1ac2195684a575840c"), "name": "Label 2", "color": "#ea1e4e", "createdAt": "2024-11-08T10:14:35", - "createdBy": "qMbT6M2GB65W7UHgJS4g", + "createdBy": str(ObjectId()), }, ] diff --git a/todo/tests/fixtures/task.py b/todo/tests/fixtures/task.py index 2ec70ea9..db84bb77 100644 --- a/todo/tests/fixtures/task.py +++ b/todo/tests/fixtures/task.py @@ -3,6 +3,8 @@ from todo.constants.task import TaskStatus from todo.dto.task_dto import TaskDTO from bson import ObjectId +from todo.dto.task_assignment_dto import TaskAssignmentDTO +from datetime import datetime tasks_db_data = [ { @@ -16,6 +18,7 @@ "isAcknowledged": True, "labels": [ObjectId("67588c1ac2195684a575840c"), ObjectId("67478036eac9d93db7f59c35")], "createdAt": "2024-11-08T10:14:35", + "isDeleted": False, "updatedAt": "2024-11-08T15:14:35", "createdBy": "qMbT6M2GB65W7UHgJS4g", "updatedBy": "qMbT6M2GB65W7UHgJS4g", @@ -47,9 +50,18 @@ title="created rest api", priority=1, status="TODO", - assignee={"id": "qMbT6M2GB65W7UHgJS4g", "name": "SYSTEM"}, + assignee=TaskAssignmentDTO( + id="assignment-1", + task_id="672f7c5b775ee9f4471ff1dd", + assignee_id="qMbT6M2GB65W7UHgJS4g", + user_type="user", + is_active=True, + created_by="xQ1CkCncM8Novk252oAj", + created_at=datetime(2024, 11, 9, 15, 14, 35, 724000), + assignee_name="SYSTEM", + ), isAcknowledged=False, - labels=[{"name": "Beginner Friendly", "color": "#fa1e4e"}], + labels=[{"id": "label-1", "name": "Beginner Friendly", "color": "#fa1e4e"}], isDeleted=False, startedAt="2024-11-09T15:14:35.724000", dueAt="2024-11-09T15:14:35.724000", @@ -64,9 +76,18 @@ title="task 2", priority=1, status="TODO", - assignee={"id": "qMbT6M2GB65W7UHgJS4g", "name": "SYSTEM"}, + assignee=TaskAssignmentDTO( + id="assignment-2", + task_id="674c726ca89aab38040cb964", + assignee_id="qMbT6M2GB65W7UHgJS4g", + user_type="user", + is_active=True, + created_by="xQ1CkCncM8Novk252oAj", + created_at=datetime(2024, 11, 9, 15, 14, 35, 724000), + assignee_name="SYSTEM", + ), isAcknowledged=True, - labels=[{"name": "Beginner Friendly", "color": "#fa1e4e"}], + labels=[{"id": "label-1", "name": "Beginner Friendly", "color": "#fa1e4e"}], isDeleted=False, startedAt="2024-11-09T15:14:35.724000", dueAt="2024-11-09T15:14:35.724000", diff --git a/todo/tests/fixtures/user.py b/todo/tests/fixtures/user.py new file mode 100644 index 00000000..e545764d --- /dev/null +++ b/todo/tests/fixtures/user.py @@ -0,0 +1,30 @@ +from datetime import datetime, timezone + +from bson import ObjectId + +users_db_data = [ + { + "google_id": "123456789", + "email_id": "test@example.com", + "name": "Test User", + "picture": "https://example.com/picture1.jpg", + "created_at": datetime.now(timezone.utc), + "updated_at": datetime.now(timezone.utc), + }, + { + "google_id": "987654321", + "email_id": "another@example.com", + "name": "Another User", + "picture": "https://example.com/picture2.jpg", + "created_at": datetime.now(timezone.utc), + "updated_at": datetime.now(timezone.utc), + }, +] + +google_auth_user_payload = { + "user_id": str(ObjectId()), + "google_id": "test_google_id", + "email": "test@example.com", + "name": "Test User", + "picture": "https://example.com/test_picture.jpg", +} diff --git a/todo/tests/integration/base_mongo_test.py b/todo/tests/integration/base_mongo_test.py new file mode 100644 index 00000000..21bbe2d7 --- /dev/null +++ b/todo/tests/integration/base_mongo_test.py @@ -0,0 +1,85 @@ +from datetime import datetime, timezone +from bson import ObjectId +from django.test import TransactionTestCase, override_settings +from django.conf import settings +from pymongo import MongoClient +from todo.models.user import UserModel +from todo.tests.testcontainers.shared_mongo import get_shared_mongo_container +from todo.utils.jwt_utils import generate_token_pair +from todo_project.db.config import DatabaseManager +from rest_framework.test import APIClient +from todo.tests.fixtures.user import google_auth_user_payload + + +class BaseMongoTestCase(TransactionTestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.mongo_container = get_shared_mongo_container() + cls.mongo_url = cls.mongo_container.get_connection_url() + cls.mongo_client = MongoClient(cls.mongo_url) + cls.db = cls.mongo_client.get_database("testdb") + + cls.override = override_settings( + MONGODB_URI=cls.mongo_url, DB_NAME="testdb", FRONTEND_URL="http://localhost:3000" + ) + cls.override.enable() + DatabaseManager.reset() + DatabaseManager().get_database() + + def setUp(self): + for collection in self.db.list_collection_names(): + self.db[collection].delete_many({}) + + @classmethod + def tearDownClass(cls): + cls.mongo_client.close() + cls.override.disable() + super().tearDownClass() + + +class AuthenticatedMongoTestCase(BaseMongoTestCase): + def setUp(self): + super().setUp() + self.client = APIClient() + self._create_test_user() + self._set_auth_cookies() + + def _create_test_user(self, userId=None): + if userId is None: + self.user_id = ObjectId() + else: + self.user_id = userId + + self.user_data = { + **google_auth_user_payload, + "user_id": str(self.user_id), + } + + self.db.users.insert_one( + { + "_id": self.user_id, + "google_id": self.user_data["google_id"], + "email_id": self.user_data["email"], + "name": self.user_data["name"], + "picture": self.user_data["picture"], + "createdAt": datetime.now(timezone.utc), + "updatedAt": datetime.now(timezone.utc), + } + ) + + def _set_auth_cookies(self): + tokens = generate_token_pair(self.user_data) + self.client.cookies[settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")] = tokens["access_token"] + self.client.cookies[settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")] = tokens["refresh_token"] + + def get_user_model(self) -> UserModel: + return UserModel( + id=self.user_id, + google_id=self.user_data["google_id"], + email_id=self.user_data["email"], + name=self.user_data["name"], + picture=self.user_data["picture"], + createdAt=datetime.now(timezone.utc), + updatedAt=datetime.now(timezone.utc), + ) diff --git a/todo/tests/integration/test_get_labels.py b/todo/tests/integration/test_get_labels.py new file mode 100644 index 00000000..60344fac --- /dev/null +++ b/todo/tests/integration/test_get_labels.py @@ -0,0 +1,120 @@ +from http import HTTPStatus +from django.urls import reverse +from django.conf import settings +from bson import ObjectId + +from todo.constants.messages import ValidationErrors +from todo.tests.fixtures.label import label_db_data +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.constants.messages import ApiErrors + + +class LabelListAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.db.labels.delete_many({}) + self.label_docs = [] + + for label in label_db_data: + label_doc = label.copy() + label_doc["_id"] = label_doc.pop("id") if "id" in label_doc else ObjectId() + self.db.labels.insert_one(label_doc) + self.label_docs.append(label_doc) + + self.url = reverse("labels") + + def test_get_labels_success(self): + response = self.client.get(self.url) + self.assertEqual(response.status_code, HTTPStatus.OK) + + data = response.json() + self.assertEqual(len(data["labels"]), len(self.label_docs)) + self.assertEqual(data["total"], len(self.label_docs)) + + for actual_label, expected_label in zip(data["labels"], self.label_docs): + self.assertEqual(actual_label["name"], expected_label["name"]) + self.assertEqual(actual_label["color"], expected_label["color"]) + + def test_get_labels_with_search_match(self): + keyword = self.label_docs[0]["name"][:3] + response = self.client.get(self.url, {"search": keyword}) + self.assertEqual(response.status_code, HTTPStatus.OK) + + data = response.json() + self.assertGreater(len(data["labels"]), 0) + self.assertTrue(any(keyword.lower() in label["name"].lower() for label in data["labels"])) + + def test_get_labels_with_search_no_match(self): + response = self.client.get(self.url, {"search": "no-match-keyword-xyz"}) + self.assertEqual(response.status_code, HTTPStatus.OK) + + data = response.json() + self.assertEqual(data["labels"], []) + self.assertEqual(data["total"], 0) + + def test_get_labels_with_invalid_pagination(self): + response = self.client.get(self.url, {"page": 99999, "limit": 10}) + self.assertEqual(response.status_code, HTTPStatus.OK) + + data = response.json() + self.assertEqual(data["labels"], []) + self.assertIsNotNone(data["error"]) + self.assertEqual(data["error"]["message"], ApiErrors.PAGE_NOT_FOUND) + self.assertEqual(data["error"]["code"], "PAGE_NOT_FOUND") + + def test_get_labels_uses_default_pagination(self): + response = self.client.get(self.url) + self.assertEqual(response.status_code, HTTPStatus.OK) + + data = response.json() + self.assertIn("page", data) + self.assertIn("limit", data) + self.assertEqual(data["page"], 1) + self.assertEqual(data["limit"], 10) + + def test_get_labels_invalid_limit_type_query_param(self): + response = self.client.get(self.url, {"limit": "invalid"}) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["errors"][0]["source"]["parameter"], "limit") + self.assertIn("A valid integer is required.", data["errors"][0]["detail"]) + + def test_get_labels_invalid_label_query_param(self): + response = self.client.get(self.url, {"limit": 0}) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["errors"][0]["source"]["parameter"], "limit") + self.assertIn(ValidationErrors.LIMIT_POSITIVE, data["errors"][0]["detail"]) + + def test_get_labels_greater_than_max_limit_query_param(self): + response = self.client.get(self.url, {"limit": 1000}) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + MAX_PAGE_LIMIT = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"] + + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["errors"][0]["source"]["parameter"], "limit") + self.assertIn(f"Ensure this value is less than or equal to {MAX_PAGE_LIMIT}.", data["errors"][0]["detail"]) + + def test_get_labels_invalid_page_type_query_param(self): + response = self.client.get(self.url, {"page": "invalid"}) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["errors"][0]["source"]["parameter"], "page") + self.assertIn("A valid integer is required.", data["errors"][0]["detail"]) + + def test_get_labels_invalid_page_query_param(self): + response = self.client.get(self.url, {"page": 0}) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["errors"][0]["source"]["parameter"], "page") + self.assertIn(ValidationErrors.PAGE_POSITIVE, data["errors"][0]["detail"]) diff --git a/todo/tests/integration/test_task_defer_api.py b/todo/tests/integration/test_task_defer_api.py new file mode 100644 index 00000000..c20393be --- /dev/null +++ b/todo/tests/integration/test_task_defer_api.py @@ -0,0 +1,144 @@ +from datetime import datetime, timedelta, timezone +from http import HTTPStatus +from bson import ObjectId +from django.urls import reverse +from todo.constants.messages import ApiErrors, ValidationErrors +from todo.constants.task import TaskPriority, TaskStatus +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.tests.fixtures.task import tasks_db_data + + +class TaskDeferAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.db.tasks.delete_many({}) + self.db.task_details.delete_many({}) + + def _insert_task(self, *, status: str = TaskStatus.TODO.value, due_at: datetime | None = None) -> str: + task_fixture = tasks_db_data[0].copy() + new_id = ObjectId() + task_fixture["_id"] = new_id + task_fixture.pop("id", None) + task_fixture["displayId"] = "#IT-DEF" + task_fixture["status"] = status + # Remove assignee from task document since it's now in separate collection + task_fixture.pop("assignee", None) + task_fixture["createdBy"] = str(self.user_id) + task_fixture["priority"] = TaskPriority.MEDIUM.value + task_fixture["createdAt"] = datetime.now(timezone.utc) + if due_at: + task_fixture["dueAt"] = due_at + else: + task_fixture.pop("dueAt", None) + + self.db.tasks.insert_one(task_fixture) + + # Create assignee task details in separate collection + assignee_details = { + "_id": ObjectId(), + "assignee_id": ObjectId(self.user_id), + "task_id": new_id, + "user_type": "user", + "is_action_taken": False, + "is_active": True, + "created_by": ObjectId(self.user_id), + "updated_by": None, + "created_at": datetime.now(timezone.utc), + "updated_at": None, + } + self.db.task_details.insert_one(assignee_details) + + return str(new_id) + + def test_defer_task_success(self): + now = datetime.now(timezone.utc) + due_at = now + timedelta(days=30) + task_id = self._insert_task(due_at=due_at) + deferred_till = now + timedelta(days=10) + + url = reverse("task_detail", args=[task_id]) + "?action=defer" + response = self.client.patch(url, data={"deferredTill": deferred_till.isoformat()}, format="json") + self.assertEqual(response.status_code, HTTPStatus.OK) + response_data = response.json() + self.assertIn("deferredDetails", response_data) + self.assertIsNotNone(response_data["deferredDetails"]) + raw_dt_str = response_data["deferredDetails"]["deferredTill"] + + if raw_dt_str.endswith("Z"): + raw_dt_str = raw_dt_str.replace("Z", "+00:00") + + response_deferred_till = datetime.fromisoformat(raw_dt_str) + + if response_deferred_till.tzinfo is None: + response_deferred_till = response_deferred_till.replace(tzinfo=timezone.utc) + + self.assertTrue(abs(response_deferred_till - deferred_till) < timedelta(seconds=1)) + + def test_defer_task_too_close_to_due_date_returns_422(self): + now = datetime.now(timezone.utc) + due_at = now + timedelta(days=5) + task_id = self._insert_task(due_at=due_at) + + deferred_till = due_at + timedelta(days=1) + + url = reverse("task_detail", args=[task_id]) + "?action=defer" + response = self.client.patch(url, data={"deferredTill": deferred_till.isoformat()}, format="json") + + self.assertEqual(response.status_code, HTTPStatus.UNPROCESSABLE_ENTITY) + response_json = response.json() + self.assertEqual(response_json["statusCode"], HTTPStatus.UNPROCESSABLE_ENTITY) + self.assertEqual(response_json["message"], ValidationErrors.CANNOT_DEFER_TOO_CLOSE_TO_DUE_DATE) + error = response_json["errors"][0] + self.assertEqual(error["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(error["detail"], ValidationErrors.CANNOT_DEFER_TOO_CLOSE_TO_DUE_DATE) + self.assertEqual(error["source"]["parameter"], "deferredTill") + + def test_defer_done_task_returns_409(self): + task_id = self._insert_task(status=TaskStatus.DONE.value) + deferred_till = datetime.now(timezone.utc) + timedelta(days=5) + + url = reverse("task_detail", args=[task_id]) + "?action=defer" + response = self.client.patch(url, data={"deferredTill": deferred_till.isoformat()}, format="json") + + self.assertEqual(response.status_code, HTTPStatus.CONFLICT) + response_data = response.json() + self.assertEqual(response_data["statusCode"], HTTPStatus.CONFLICT) + self.assertEqual(response_data["message"], ValidationErrors.CANNOT_DEFER_A_DONE_TASK) + error = response_data["errors"][0] + self.assertEqual(error["title"], ApiErrors.STATE_CONFLICT_TITLE) + self.assertEqual(error["detail"], ValidationErrors.CANNOT_DEFER_A_DONE_TASK) + self.assertEqual(error["source"]["path"], "task_id") + + def test_defer_task_with_invalid_date_format_returns_400(self): + task_id = self._insert_task() + url = reverse("task_detail", args=[task_id]) + "?action=defer" + response = self.client.patch(url, data={"deferredTill": "invalid-date-format"}, format="json") + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + response_data = response.json() + self.assertEqual(response_data["errors"][0]["source"]["parameter"], "deferredTill") + + def test_defer_task_with_missing_date_returns_400(self): + task_id = self._insert_task() + url = reverse("task_detail", args=[task_id]) + "?action=defer" + response = self.client.patch(url, data={}, format="json") + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + response_data = response.json() + self.assertEqual(response_data["errors"][0]["source"]["parameter"], "deferredTill") + self.assertIn("required", response_data["errors"][0]["detail"]) + + def test_defer_task_unauthorized(self): + now = datetime.now(timezone.utc) + due_at = now + timedelta(days=30) + task_id = self._insert_task(due_at=due_at) + deferred_till = now + timedelta(days=10) + url = reverse("task_detail", args=[task_id]) + "?action=defer" + other_user_id = ObjectId() + self._create_test_user(other_user_id) + self._set_auth_cookies() + + response = self.client.patch(url, data={"deferredTill": deferred_till.isoformat()}, format="json") + self.assertEqual(response.status_code, HTTPStatus.FORBIDDEN) + response_data = response.json() + self.assertEqual(response_data["message"], ApiErrors.UNAUTHORIZED_TITLE) + err = response_data["errors"][0] + self.assertEqual(err["title"], ApiErrors.UNAUTHORIZED_TITLE) diff --git a/todo/tests/integration/test_task_detail_api.py b/todo/tests/integration/test_task_detail_api.py new file mode 100644 index 00000000..454f0d0a --- /dev/null +++ b/todo/tests/integration/test_task_detail_api.py @@ -0,0 +1,80 @@ +from http import HTTPStatus +from django.urls import reverse +from bson import ObjectId +from datetime import datetime, timezone +from todo.tests.fixtures.task import tasks_db_data +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.constants.messages import ApiErrors, ValidationErrors + + +class TaskDetailAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.db.tasks.delete_many({}) + self.db.task_details.delete_many({}) + + self.task_doc = tasks_db_data[1].copy() + self.task_doc["_id"] = self.task_doc.pop("id") + # Remove assignee from task document since it's now in separate collection + self.task_doc.pop("assignee", None) + self.task_doc["createdBy"] = str(self.user_id) + self.task_doc["updatedBy"] = str(self.user_id) + self.db.tasks.insert_one(self.task_doc) + + # Create assignee task details in separate collection + assignee_details = { + "_id": str(ObjectId()), + "assignee_id": str(self.user_id), + "task_id": str(self.task_doc["_id"]), + "user_type": "user", + "is_active": True, + "created_by": str(self.user_id), + "updated_by": None, + "created_at": datetime.now(timezone.utc), + "updated_at": None, + } + self.db.task_details.insert_one(assignee_details) + + self.existing_task_id = str(self.task_doc["_id"]) + self.non_existent_id = str(ObjectId()) + self.invalid_task_id = "invalid-task-id" + + def test_get_task_by_id_success(self): + url = reverse("task_detail", args=[self.existing_task_id]) + response = self.client.get(url) + self.assertEqual(response.status_code, HTTPStatus.OK) + data = response.json()["data"] + self.assertEqual(data["id"], self.existing_task_id) + self.assertEqual(data["title"], self.task_doc["title"]) + self.assertEqual(data["priority"], "MEDIUM") + self.assertEqual(data["status"], self.task_doc["status"]) + self.assertEqual(data["displayId"], self.task_doc["displayId"]) + self.assertEqual(data["createdBy"]["id"], self.task_doc["createdBy"]) + # Check that assignee details are included + self.assertIsNotNone(data["assignee"]) + self.assertEqual(data["assignee"]["assignee_id"], str(self.user_id)) + self.assertEqual(data["assignee"]["user_type"], "user") + + def test_get_task_by_id_not_found(self): + url = reverse("task_detail", args=[self.non_existent_id]) + response = self.client.get(url) + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + data = response.json() + error_message = ApiErrors.TASK_NOT_FOUND.format(self.non_existent_id) + self.assertEqual(data["message"], error_message) + error = data["errors"][0] + self.assertEqual(error["source"]["path"], "task_id") + self.assertEqual(error["title"], ApiErrors.RESOURCE_NOT_FOUND_TITLE) + self.assertEqual(error["detail"], error_message) + + def test_get_task_by_id_invalid_format(self): + url = reverse("task_detail", args=[self.invalid_task_id]) + response = self.client.get(url) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["message"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(data["errors"][0]["source"]["path"], "task_id") + self.assertEqual(data["errors"][0]["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(data["errors"][0]["detail"], ValidationErrors.INVALID_TASK_ID_FORMAT) diff --git a/todo/tests/integration/test_task_profile_api.py b/todo/tests/integration/test_task_profile_api.py new file mode 100644 index 00000000..44f70de7 --- /dev/null +++ b/todo/tests/integration/test_task_profile_api.py @@ -0,0 +1,23 @@ +from http import HTTPStatus +from django.urls import reverse +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase + + +class TaskProfileAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.url = reverse("tasks") + self.db.tasks.delete_many({}) + # Remove manual user insertion; AuthenticatedMongoTestCase already creates the user + + def test_get_tasks_profile_true_requires_auth(self): + client = self.client.__class__() + response = client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED) + + def test_get_tasks_profile_true_empty_for_no_tasks(self): + self.db.tasks.delete_many({}) + response = self.client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, HTTPStatus.OK) + data = response.json() + self.assertEqual(data["tasks"], []) diff --git a/todo/tests/integration/test_task_sorting_integration.py b/todo/tests/integration/test_task_sorting_integration.py new file mode 100644 index 00000000..2f431f72 --- /dev/null +++ b/todo/tests/integration/test_task_sorting_integration.py @@ -0,0 +1,154 @@ +from unittest.mock import patch, Mock +from rest_framework import status +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.constants.task import ( + SORT_FIELD_PRIORITY, + SORT_FIELD_DUE_AT, + SORT_FIELD_CREATED_AT, + SORT_FIELD_UPDATED_AT, + SORT_FIELD_ASSIGNEE, + SORT_ORDER_ASC, + SORT_ORDER_DESC, +) + + +class TaskSortingIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_priority_sorting_integration(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + response = self.client.get("/v1/tasks", {"sort_by": "priority", "order": "desc"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_list.assert_called_with( + 1, 20, SORT_FIELD_PRIORITY, SORT_ORDER_DESC, str(self.user_id), team_id=None, status_filter=None + ) + + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_due_at_default_order_integration(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + response = self.client.get("/v1/tasks", {"sort_by": "dueAt"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + mock_list.assert_called_with( + 1, 20, SORT_FIELD_DUE_AT, SORT_ORDER_ASC, str(self.user_id), team_id=None, status_filter=None + ) + + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_assignee_sorting_uses_aggregation(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + response = self.client.get("/v1/tasks", {"sort_by": "assignee", "order": "asc"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # Assignee sorting now falls back to createdAt sorting + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_ASSIGNEE, SORT_ORDER_ASC, str(self.user_id), team_id=None, status_filter=None + ) + + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_field_specific_defaults_integration(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + test_cases = [ + (SORT_FIELD_CREATED_AT, SORT_ORDER_DESC), + (SORT_FIELD_UPDATED_AT, SORT_ORDER_DESC), + (SORT_FIELD_DUE_AT, SORT_ORDER_ASC), + (SORT_FIELD_PRIORITY, SORT_ORDER_DESC), + (SORT_FIELD_ASSIGNEE, SORT_ORDER_ASC), + ] + + for sort_field, expected_order in test_cases: + with self.subTest(sort_field=sort_field, expected_order=expected_order): + mock_list.reset_mock() + mock_count.reset_mock() + + response = self.client.get("/v1/tasks", {"sort_by": sort_field}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_list.assert_called_with( + 1, 20, sort_field, expected_order, str(self.user_id), team_id=None, status_filter=None + ) + + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_pagination_with_sorting_integration(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 100 + + response = self.client.get("/v1/tasks", {"page": "3", "limit": "5", "sort_by": "createdAt", "order": "asc"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + mock_list.assert_called_with( + 3, 5, SORT_FIELD_CREATED_AT, SORT_ORDER_ASC, str(self.user_id), team_id=None, status_filter=None + ) + + def test_invalid_sort_parameters_integration(self): + response = self.client.get("/v1/tasks", {"sort_by": "invalid_field"}) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + response = self.client.get("/v1/tasks", {"sort_by": "priority", "order": "invalid_order"}) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_default_behavior_integration(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + response = self.client.get("/v1/tasks") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + mock_list.assert_called_with( + 1, 20, SORT_FIELD_UPDATED_AT, SORT_ORDER_DESC, str(self.user_id), team_id=None, status_filter=None + ) + + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + @patch("todo.services.task_service.reverse_lazy", return_value="/v1/tasks") + @patch("todo.repositories.task_repository.TaskRepository.count") + @patch("todo.repositories.task_repository.TaskRepository.list") + def test_pagination_links_preserve_sort_params_integration( + self, mock_list, mock_count, mock_reverse, mock_user_repo + ): + from todo.tests.fixtures.task import tasks_models + + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + + mock_list.return_value = [tasks_models[0]] if tasks_models else [] + mock_count.return_value = 3 + + with ( + patch("todo.services.task_service.LabelRepository.list_by_ids", return_value=[]), + ): + response = self.client.get("/v1/tasks", {"page": "2", "limit": "1", "sort_by": "priority", "order": "desc"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + if response.data.get("links"): + links = response.data["links"] + if links.get("next"): + self.assertIn("sort_by=priority", links["next"]) + self.assertIn("order=desc", links["next"]) + if links.get("prev"): + self.assertIn("sort_by=priority", links["prev"]) + self.assertIn("order=desc", links["prev"]) diff --git a/todo/tests/integration/test_task_update_api.py b/todo/tests/integration/test_task_update_api.py new file mode 100644 index 00000000..f83ecb48 --- /dev/null +++ b/todo/tests/integration/test_task_update_api.py @@ -0,0 +1,108 @@ +from datetime import datetime, timedelta, timezone +from http import HTTPStatus + +from bson import ObjectId +from django.urls import reverse +from todo.constants.messages import ApiErrors, ValidationErrors +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.tests.fixtures.task import tasks_db_data + + +class TaskUpdateAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.db.tasks.delete_many({}) + self.db.task_details.delete_many({}) + + doc = tasks_db_data[0].copy() + self.task_id = ObjectId() + doc["_id"] = self.task_id + doc.pop("id", None) + # Remove assignee from task document since it's now in separate collection + doc.pop("assignee", None) + doc["createdBy"] = str(self.user_id) + + doc["createdAt"] = datetime.now(timezone.utc) - timedelta(days=1) + self.db.tasks.insert_one(doc) + + # Create assignee task details in separate collection + assignee_details = { + "_id": ObjectId(), + "assignee_id": ObjectId(self.user_id), + "task_id": self.task_id, + "user_type": "user", + "is_action_taken": False, + "is_active": True, + "created_by": ObjectId(self.user_id), + "updated_by": None, + "created_at": datetime.now(timezone.utc), + "updated_at": None, + } + self.db.task_details.insert_one(assignee_details) + + self.valid_id = str(self.task_id) + self.missing_id = str(ObjectId()) + self.bad_id = "bad-task-id" + + def test_update_task_success(self): + url = reverse("task_detail", args=[self.valid_id]) + payload = { + "title": "Updated Task Title", + "description": "Updated via integration-test.", + "priority": "LOW", + "status": "IN_PROGRESS", + "isAcknowledged": False, + } + res = self.client.patch(url, data=payload, format="json") + self.assertEqual(res.status_code, HTTPStatus.OK) + body = res.json() + self.assertEqual(body["id"], self.valid_id) + self.assertEqual(body["title"], payload["title"]) + self.assertEqual(body["description"], payload["description"]) + self.assertEqual(body["priority"], payload["priority"]) + self.assertEqual(body["status"], payload["status"]) + self.assertEqual(body["isAcknowledged"], payload["isAcknowledged"]) + updated_at = datetime.fromisoformat(body["updatedAt"].replace("Z", "")) + self.assertTrue(datetime.utcnow() - updated_at < timedelta(minutes=1)) + + def test_update_task_not_found(self): + url = reverse("task_detail", args=[self.missing_id]) + res = self.client.patch(url, data={"title": "ghost"}, format="json") + self.assertEqual(res.status_code, HTTPStatus.NOT_FOUND) + msg = ApiErrors.TASK_NOT_FOUND.format(self.missing_id) + self.assertEqual(res.json()["message"], msg) + err = res.json()["errors"][0] + self.assertEqual(err["title"], ApiErrors.RESOURCE_NOT_FOUND_TITLE) + self.assertEqual(err["detail"], msg) + self.assertEqual(err["source"]["path"], "task_id") + + def test_update_task_invalid_id_format(self): + url = reverse("task_detail", args=[self.bad_id]) + res = self.client.patch(url, data={"title": "bad"}, format="json") + self.assertEqual(res.status_code, HTTPStatus.BAD_REQUEST) + body = res.json() + self.assertEqual(body["statusCode"], HTTPStatus.BAD_REQUEST) + self.assertEqual(body["message"], ValidationErrors.INVALID_TASK_ID_FORMAT) + err = body["errors"][0] + self.assertEqual(err["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(err["detail"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(err["source"]["path"], "task_id") + + def test_update_task_unauthorized(self): + other_user_id = ObjectId() + self._create_test_user(other_user_id) + self._set_auth_cookies() + url = reverse("task_detail", args=[self.valid_id]) + payload = { + "title": "Updated Task Title", + "description": "Updated via integration-test.", + "priority": "LOW", + "status": "IN_PROGRESS", + "isAcknowledged": False, + } + res = self.client.patch(url, data=payload, format="json") + self.assertEqual(res.status_code, HTTPStatus.FORBIDDEN) + body = res.json() + self.assertEqual(body["message"], ApiErrors.UNAUTHORIZED_TITLE) + err = body["errors"][0] + self.assertEqual(err["title"], ApiErrors.UNAUTHORIZED_TITLE) diff --git a/todo/tests/integration/test_tasks_delete.py b/todo/tests/integration/test_tasks_delete.py new file mode 100644 index 00000000..4f8cf38f --- /dev/null +++ b/todo/tests/integration/test_tasks_delete.py @@ -0,0 +1,84 @@ +from http import HTTPStatus +from django.urls import reverse +from bson import ObjectId +from datetime import datetime, timezone + +from todo.tests.fixtures.task import tasks_db_data +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.constants.messages import ValidationErrors, ApiErrors + + +class TaskDeleteAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.db.tasks.delete_many({}) + self.db.task_details.delete_many({}) + + task_doc = tasks_db_data[0].copy() + task_doc["_id"] = task_doc.pop("id") + # Remove assignee from task document since it's now in separate collection + task_doc.pop("assignee", None) + # Set the task to be created by the test user + task_doc["createdBy"] = str(self.user_id) + task_doc["updatedBy"] = str(self.user_id) + self.db.tasks.insert_one(task_doc) + + # Create assignee task details in separate collection + assignee_details = { + "_id": ObjectId(), + "assignee_id": ObjectId(self.user_data["user_id"]), + "task_id": str(task_doc["_id"]), + "user_type": "user", + "is_action_taken": False, + "is_active": True, + "created_by": ObjectId(self.user_data["user_id"]), + "updated_by": None, + "created_at": datetime.now(timezone.utc), + "updated_at": None, + } + self.db.task_details.insert_one(assignee_details) + + self.existing_task_id = str(task_doc["_id"]) + self.non_existent_id = str(ObjectId()) + self.invalid_task_id = "invalid-task-id" + + def test_delete_task_success(self): + url = reverse("task_detail", args=[self.existing_task_id]) + response = self.client.delete(url) + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + + def test_delete_task_not_found(self): + url = reverse("task_detail", args=[self.non_existent_id]) + response = self.client.delete(url) + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + response_data = response.json() + error_message = ApiErrors.TASK_NOT_FOUND.format(self.non_existent_id) + self.assertEqual(response_data["message"], error_message) + error = response_data["errors"][0] + self.assertEqual(error["source"]["path"], "task_id") + self.assertEqual(error["title"], ApiErrors.RESOURCE_NOT_FOUND_TITLE) + self.assertEqual(error["detail"], error_message) + + def test_delete_task_invalid_id_format(self): + url = reverse("task_detail", args=[self.invalid_task_id]) + response = self.client.delete(url) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + data = response.json() + self.assertEqual(data["statusCode"], 400) + self.assertEqual(data["message"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(data["errors"][0]["source"]["path"], "task_id") + self.assertEqual(data["errors"][0]["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(data["errors"][0]["detail"], ValidationErrors.INVALID_TASK_ID_FORMAT) + + def test_delete_task_unauthorized(self): + other_user_id = ObjectId() + + self._create_test_user(other_user_id) + self._set_auth_cookies() + url = reverse("task_detail", args=[self.existing_task_id]) + response = self.client.delete(url) + + self.assertEqual(response.status_code, HTTPStatus.FORBIDDEN) + data = response.json() + self.assertEqual(data["message"], ApiErrors.UNAUTHORIZED_TITLE) + self.assertEqual(data["errors"][0]["title"], ApiErrors.UNAUTHORIZED_TITLE) diff --git a/todo/tests/integration/test_tasks_pagination.py b/todo/tests/integration/test_tasks_pagination.py index cc95e4fa..dc7d22c9 100644 --- a/todo/tests/integration/test_tasks_pagination.py +++ b/todo/tests/integration/test_tasks_pagination.py @@ -1,47 +1,53 @@ -from unittest import TestCase from unittest.mock import patch from django.conf import settings -from rest_framework.test import APIRequestFactory - -from todo.views.task import TaskView +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase from todo.dto.responses.get_tasks_response import GetTasksResponse -from todo.tests.fixtures.task import task_dtos -class TaskPaginationIntegrationTest(TestCase): +class TaskPaginationIntegrationTest(AuthenticatedMongoTestCase): """Integration tests for task pagination settings""" def setUp(self): - self.factory = APIRequestFactory() - self.view = TaskView.as_view() + super().setUp() @patch("todo.services.task_service.TaskService.get_tasks") def test_pagination_settings_integration(self, mock_get_tasks): """Test that the view and serializer correctly use Django settings for pagination""" - mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + mock_get_tasks.return_value = GetTasksResponse(tasks=[], links=None) - # Test with no query params (should use default limit) - request = self.factory.get("/tasks") - response = self.view(request) + default_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["DEFAULT_PAGE_LIMIT"] - # Check serializer validation passed and returned 200 OK - self.assertEqual(response.status_code, 200) + response = self.client.get("/v1/tasks") - default_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["DEFAULT_PAGE_LIMIT"] - mock_get_tasks.assert_called_with(page=1, limit=default_limit) + self.assertEqual(response.status_code, 200) + mock_get_tasks.assert_called_with( + page=1, + limit=default_limit, + sort_by="updatedAt", + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) mock_get_tasks.reset_mock() - request = self.factory.get("/tasks", {"limit": "10"}) - response = self.view(request) + response = self.client.get("/v1/tasks", {"limit": "10"}) self.assertEqual(response.status_code, 200) - mock_get_tasks.assert_called_with(page=1, limit=10) + mock_get_tasks.assert_called_with( + page=1, + limit=10, + sort_by="updatedAt", + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) # Verify API rejects values above max limit max_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"] - request = self.factory.get("/tasks", {"limit": str(max_limit + 1)}) - response = self.view(request) + response = self.client.get("/v1/tasks", {"limit": str(max_limit + 1)}) # Should get a 400 error self.assertEqual(response.status_code, 400) diff --git a/todo/tests/integration/test_user_profile_api.py b/todo/tests/integration/test_user_profile_api.py new file mode 100644 index 00000000..c7c20196 --- /dev/null +++ b/todo/tests/integration/test_user_profile_api.py @@ -0,0 +1,21 @@ +from http import HTTPStatus +from django.urls import reverse +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase + + +class UserProfileAPIIntegrationTest(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.url = reverse("users") + + def test_user_profile_true_requires_auth(self): + client = self.client.__class__() + response = client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED) + + def test_user_profile_true_returns_user_info(self): + response = self.client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, HTTPStatus.OK) + data = response.json()["data"] + self.assertEqual(data["id"], str(self.user_id)) + self.assertEqual(data["email"], self.user_data["email"]) diff --git a/todo/tests/testcontainers/mongo_container.py b/todo/tests/testcontainers/mongo_container.py new file mode 100644 index 00000000..05f2d2ab --- /dev/null +++ b/todo/tests/testcontainers/mongo_container.py @@ -0,0 +1,47 @@ +import time +import json +from testcontainers.core.generic import DockerContainer +from pymongo import MongoClient +from testcontainers.core.waiting_utils import wait_for_logs + + +class MongoReplicaSetContainer(DockerContainer): + def __init__(self, image: str = "mongo:6.0"): + super().__init__(image=image) + self.with_exposed_ports(27017) + self.with_command(["mongod", "--replSet", "rs0", "--bind_ip_all"]) + self._mongo_url = None + + def start(self): + super().start() + self._container.reload() + mapped_port = self.get_exposed_port(27017) + container_ip = self._container.attrs["NetworkSettings"]["IPAddress"] + member_host = f"{container_ip}:27017" + initiate_js = json.dumps({"_id": "rs0", "members": [{"_id": 0, "host": member_host}]}) + wait_for_logs(self, r"Waiting for connections", timeout=20) + cmd = ["mongosh", "--quiet", "--host", "localhost", "--port", "27017", "--eval", f"rs.initiate({initiate_js})"] + exit_code, output = self.exec(cmd) + if exit_code != 0: + raise RuntimeError( + f"rs.initiate() failed (exit code {exit_code}):\n{output.decode('utf-8', errors='ignore')}" + ) + self._mongo_url = f"mongodb://localhost:{mapped_port}/testdb?directConnection=true" + self._wait_for_primary() + return self + + def get_connection_url(self) -> str: + return self._mongo_url + + def _wait_for_primary(self, timeout=10): + client = MongoClient(self.get_connection_url()) + start = time.time() + while time.time() - start < timeout: + try: + status = client.admin.command("isMaster") + if status.get("ismaster", False): + return + except Exception as e: + print(f"Waiting for PRIMARY: {e}") + time.sleep(0.5) + raise TimeoutError("Timed out waiting for replica set to become PRIMARY.") diff --git a/todo/tests/testcontainers/shared_mongo.py b/todo/tests/testcontainers/shared_mongo.py new file mode 100644 index 00000000..61dbf066 --- /dev/null +++ b/todo/tests/testcontainers/shared_mongo.py @@ -0,0 +1,27 @@ +from todo.tests.testcontainers.mongo_container import MongoReplicaSetContainer +import atexit + +_mongo_container = None + + +def _cleanup_mongo_container(): + global _mongo_container + if _mongo_container is not None: + try: + _mongo_container.stop() + except Exception as e: + print("Failed to stop MongoDB container:", str(e)) + + +def get_shared_mongo_container(): + global _mongo_container + if _mongo_container is None: + try: + _mongo_container = MongoReplicaSetContainer() + _mongo_container.start() + atexit.register(_cleanup_mongo_container) + except Exception as e: + print("Failed to start MongoDB container:", str(e)) + raise + + return _mongo_container diff --git a/todo/tests/unit/exceptions/test_exception_handler.py b/todo/tests/unit/exceptions/test_exception_handler.py index 80dcb71c..ebe0ea6a 100644 --- a/todo/tests/unit/exceptions/test_exception_handler.py +++ b/todo/tests/unit/exceptions/test_exception_handler.py @@ -1,39 +1,116 @@ from unittest import TestCase from unittest.mock import Mock, patch -from rest_framework.exceptions import ValidationError +from rest_framework.exceptions import ValidationError as DRFValidationError from rest_framework.response import Response from rest_framework import status +from rest_framework.views import APIView +from django.conf import settings from todo.exceptions.exception_handler import handle_exception, format_validation_errors from todo.dto.responses.error_response import ApiErrorDetail, ApiErrorSource +from todo.constants.messages import ApiErrors, ValidationErrors +from todo.exceptions.task_exceptions import TaskStateConflictException, UnprocessableEntityException +from bson.errors import InvalidId as BsonInvalidId class ExceptionHandlerTests(TestCase): - @patch("todo.exceptions.exception_handler.format_validation_errors") - def test_returns_400_for_validation_error(self, mock_format_validation_errors: Mock): - validation_error = ValidationError(detail={"field": ["error message"]}) - mock_format_validation_errors.return_value = [ - ApiErrorDetail(detail="error message", source={ApiErrorSource.PARAMETER: "field"}) - ] - - response = handle_exception(validation_error, {}) + def test_returns_400_for_validation_error(self): + error_detail = {"field": ["error message"]} + exception = DRFValidationError(detail=error_detail) + request = Mock() + + with patch("todo.exceptions.exception_handler.format_validation_errors") as mock_format: + mock_format.return_value = [ + ApiErrorDetail(detail="error message", source={ApiErrorSource.PARAMETER: "field"}) + ] + response = handle_exception(exception, {"request": request}) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + expected_response = { + "statusCode": 400, + "message": "error message", + "errors": [{"source": {"parameter": "field"}, "detail": "error message"}], + } + self.assertDictEqual(response.data, expected_response) + mock_format.assert_called_once_with(error_detail) - self.assertIsInstance(response, Response) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - expected_response = { - "statusCode": 400, - "message": "Invalid request", - "errors": [{"source": {"parameter": "field"}, "detail": "error message"}], - } - self.assertDictEqual(response.data, expected_response) + def test_handles_task_state_conflict_exception(self): + task_id = "some_task_id" + exception = TaskStateConflictException(ValidationErrors.CANNOT_DEFER_A_DONE_TASK) + context = {"kwargs": {"task_id": task_id}} - mock_format_validation_errors.assert_called_once_with(validation_error.detail) + response = handle_exception(exception, context) - def test_uses_default_handler_for_non_validation_error(self): - generic_exception = ValueError("Something went wrong") + self.assertIsInstance(response, Response) + self.assertEqual(response.status_code, status.HTTP_409_CONFLICT) + self.assertEqual(response.data["statusCode"], status.HTTP_409_CONFLICT) + self.assertEqual(response.data["message"], ValidationErrors.CANNOT_DEFER_A_DONE_TASK) + self.assertEqual(len(response.data["errors"]), 1) + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.STATE_CONFLICT_TITLE) + self.assertEqual(response.data["errors"][0]["source"], {"path": "task_id"}) + + def test_handles_unprocessable_entity_exception(self): + source = {ApiErrorSource.PARAMETER.value: "test_field"} + exception = UnprocessableEntityException("Cannot process this", source=source) + context = {} + response = handle_exception(exception, context) + + self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) + self.assertEqual(response.data["message"], "Cannot process this") + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(response.data["errors"][0]["source"], source) + + def test_handles_bson_invalid_id_exception(self): + task_id = "invalid-id" + exception = BsonInvalidId("Invalid ID") + context = {"kwargs": {"task_id": task_id}} + response = handle_exception(exception, context) - response = handle_exception(generic_exception, {}) - self.assertIsNone(response) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["message"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(response.data["errors"][0]["detail"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(response.data["errors"][0]["source"], {"path": "task_id"}) + + def test_custom_handler_formats_generic_exception(self): + request = None + context = {"request": request, "view": APIView()} + error_message = "A truly generic error occurred" + exception = Exception(error_message) + + with patch("todo.exceptions.exception_handler.drf_exception_handler") as mock_drf_handler: + mock_drf_handler.return_value = None + + response = handle_exception(exception, context) + + self.assertIsNotNone(response) + self.assertIsInstance(response, Response) + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + + self.assertIsInstance(response.data, dict) + + expected_detail_obj_in_list = ApiErrorDetail( + detail=error_message if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR, + title=error_message, + ) + expected_main_message = ApiErrors.INTERNAL_SERVER_ERROR + + self.assertEqual(response.data.get("statusCode"), status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data.get("message"), expected_main_message) + self.assertIsInstance(response.data.get("errors"), list) + + if response.data.get("errors"): + self.assertEqual(len(response.data["errors"]), 1) + actual_error_detail_dict = response.data["errors"][0] + self.assertEqual(actual_error_detail_dict.get("detail"), expected_detail_obj_in_list.detail) + self.assertEqual(actual_error_detail_dict.get("title"), expected_detail_obj_in_list.title) + + +class CustomExceptionsTests(TestCase): + def test_task_state_conflict_exception(self): + message = "Test conflict message" + exception = TaskStateConflictException(message) + self.assertEqual(str(exception), message) class FormatValidationErrorsTests(TestCase): diff --git a/todo/tests/unit/middlewares/__init__.py b/todo/tests/unit/middlewares/__init__.py new file mode 100644 index 00000000..9d445f48 --- /dev/null +++ b/todo/tests/unit/middlewares/__init__.py @@ -0,0 +1 @@ +# This file is required for Python to recognize this directory as a package diff --git a/todo/tests/unit/middlewares/test_jwt_auth.py b/todo/tests/unit/middlewares/test_jwt_auth.py new file mode 100644 index 00000000..bbda0789 --- /dev/null +++ b/todo/tests/unit/middlewares/test_jwt_auth.py @@ -0,0 +1,118 @@ +from unittest import TestCase +from unittest.mock import Mock, patch +from django.http import HttpRequest, JsonResponse +from django.conf import settings +from rest_framework import status +import json +from todo.middlewares.jwt_auth import ( + JWTAuthenticationMiddleware, + get_current_user_info, +) +from todo.constants.messages import AuthErrorMessages +from todo.models.user import UserModel + + +class JWTAuthenticationMiddlewareTests(TestCase): + def setUp(self): + self.get_response = Mock(return_value=JsonResponse({"data": "test"})) + self.middleware = JWTAuthenticationMiddleware(self.get_response) + self.request = Mock(spec=HttpRequest) + self.request.path = "/v1/tasks" + self.request.headers = {} + self.request.COOKIES = {} + + def test_public_path_authentication_bypass(self): + """Test that requests to public paths bypass authentication""" + self.request.path = "/v1/auth/google/login" + response = self.middleware(self.request) + self.get_response.assert_called_once_with(self.request) + self.assertEqual(response.status_code, 200) + + @patch("todo.middlewares.jwt_auth.JWTAuthenticationMiddleware._try_authentication") + def test_authentication_success(self, mock_auth): + """Test successful authentication""" + mock_auth.return_value = True + self.request.COOKIES = {settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"): "valid_token"} + response = self.middleware(self.request) + mock_auth.assert_called_once_with(self.request) + self.get_response.assert_called_once_with(self.request) + self.assertEqual(response.status_code, 200) + + @patch("todo.middlewares.jwt_auth.validate_access_token") + @patch("todo.middlewares.jwt_auth.UserRepository.get_by_id") + def test_access_token_validation_success(self, mock_get_user, mock_validate): + """Test successful access token validation""" + mock_validate.return_value = {"user_id": "123", "token_type": "access"} + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_get_user.return_value = mock_user + + self.request.COOKIES = {settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"): "valid_token"} + self.middleware(self.request) + self.assertEqual(self.request.user_id, "123") + self.assertEqual(self.request.user_email, "test@example.com") + self.get_response.assert_called_once_with(self.request) + + @patch("todo.middlewares.jwt_auth.validate_access_token") + @patch("todo.middlewares.jwt_auth.validate_refresh_token") + @patch("todo.middlewares.jwt_auth.generate_access_token") + @patch("todo.middlewares.jwt_auth.UserRepository.get_by_id") + def test_refresh_token_success(self, mock_get_user, mock_generate, mock_validate_refresh, mock_validate_access): + """Test successful token refresh when access token is expired""" + from todo.exceptions.auth_exceptions import TokenExpiredError + + mock_validate_access.side_effect = TokenExpiredError("Token expired") + mock_validate_refresh.return_value = {"user_id": "123", "token_type": "refresh"} + mock_generate.return_value = "new_access_token" + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_get_user.return_value = mock_user + + self.request.COOKIES = { + settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"): "expired_token", + settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME"): "valid_refresh_token", + } + self.middleware(self.request) + self.assertEqual(self.request.user_id, "123") + self.assertEqual(self.request.user_email, "test@example.com") + self.assertEqual(self.request._new_access_token, "new_access_token") + self.get_response.assert_called_once_with(self.request) + + @patch("todo.middlewares.jwt_auth.validate_access_token") + @patch("todo.middlewares.jwt_auth.UserRepository.get_by_id") + def test_user_not_found_in_database(self, mock_get_user, mock_validate): + """Test authentication failure when user not found in database""" + mock_validate.return_value = {"user_id": "123", "token_type": "access"} + mock_get_user.return_value = None + + self.request.COOKIES = {settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"): "valid_token"} + response = self.middleware(self.request) + + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + response_data = json.loads(response.content) + self.assertEqual(response_data["message"], AuthErrorMessages.AUTHENTICATION_REQUIRED) + + def test_no_tokens_provided(self): + """Test handling of request with no tokens""" + response = self.middleware(self.request) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + response_data = json.loads(response.content) + self.assertEqual(response_data["message"], AuthErrorMessages.AUTHENTICATION_REQUIRED) + + +class AuthUtilityFunctionsTests(TestCase): + def setUp(self): + self.request = Mock(spec=HttpRequest) + + def test_get_current_user_info_with_user_id(self): + """Test getting user info when user ID is present""" + self.request.user_id = "user_123" + self.request.user_email = "test@example.com" + user_info = get_current_user_info(self.request) + self.assertEqual(user_info["user_id"], "user_123") + self.assertEqual(user_info["email"], "test@example.com") + + def test_get_current_user_info_no_user_id(self): + """Test getting user info when no user ID is present""" + user_info = get_current_user_info(self.request) + self.assertIsNone(user_info) diff --git a/todo/tests/unit/models/test_label.py b/todo/tests/unit/models/test_label.py index 1ae4495a..4801a8c6 100644 --- a/todo/tests/unit/models/test_label.py +++ b/todo/tests/unit/models/test_label.py @@ -17,7 +17,7 @@ def test_label_model_instantiates_with_valid_data(self): self.assertIsNone(label.updatedAt) # Default value self.assertIsNone(label.updatedBy) # Default value - def test_lable_model_throws_error_when_missing_required_fields(self): + def test_label_model_throws_error_when_missing_required_fields(self): incomplete_data = self.valid_data.copy() required_fields = ["name", "color", "createdAt", "createdBy"] for field_name in required_fields: diff --git a/todo/tests/unit/models/test_task.py b/todo/tests/unit/models/test_task.py index 88ba3768..3d370797 100644 --- a/todo/tests/unit/models/test_task.py +++ b/todo/tests/unit/models/test_task.py @@ -58,12 +58,11 @@ def test_task_model_defaults_are_set_correctly(self): def test_task_model_allows_none_for_optional_fields(self): data = self.valid_task_data.copy() - optional_fields = ["description", "assignee", "labels", "dueAt", "updatedBy", "updatedAt", "deferredDetails"] + optional_fields = ["description", "labels", "dueAt", "updatedBy", "updatedAt", "deferredDetails"] for field in optional_fields: data[field] = None task = TaskModel(**data) self.assertIsNone(task.description) - self.assertIsNone(task.assignee) self.assertIsNone(task.dueAt) diff --git a/todo/tests/unit/models/test_user.py b/todo/tests/unit/models/test_user.py new file mode 100644 index 00000000..aa82c79f --- /dev/null +++ b/todo/tests/unit/models/test_user.py @@ -0,0 +1,57 @@ +from unittest import TestCase +from datetime import datetime, timezone +from pydantic import ValidationError +from todo.models.user import UserModel +from todo.tests.fixtures.user import users_db_data + + +class UserModelTest(TestCase): + def setUp(self) -> None: + self.valid_user_data = users_db_data[0] + + def test_user_model_instantiates_with_valid_data(self): + user = UserModel(**self.valid_user_data) + + self.assertEqual(user.google_id, self.valid_user_data["google_id"]) + self.assertEqual(user.email_id, self.valid_user_data["email_id"]) + self.assertEqual(user.name, self.valid_user_data["name"]) + self.assertEqual(user.created_at, self.valid_user_data["created_at"]) + self.assertEqual(user.updated_at, self.valid_user_data["updated_at"]) + self.assertEqual(user.picture, self.valid_user_data["picture"]) + + def test_user_model_throws_error_when_missing_required_fields(self): + required_fields = ["google_id", "email_id", "name"] + + for field in required_fields: + with self.subTest(f"missing field: {field}"): + incomplete_data = self.valid_user_data.copy() + incomplete_data.pop(field, None) + + with self.assertRaises(ValidationError) as context: + UserModel(**incomplete_data) + + error_fields = [e["loc"][0] for e in context.exception.errors()] + self.assertIn(field, error_fields) + + def test_user_model_throws_error_when_invalid_email(self): + invalid_data = self.valid_user_data.copy() + invalid_data["email_id"] = "invalid-email" + + with self.assertRaises(ValidationError) as context: + UserModel(**invalid_data) + + error_fields = [e["loc"][0] for e in context.exception.errors()] + self.assertIn("email_id", error_fields) + + def test_user_model_sets_default_timestamps(self): + minimal_data = { + "google_id": self.valid_user_data["google_id"], + "email_id": self.valid_user_data["email_id"], + "name": self.valid_user_data["name"], + "picture": self.valid_user_data["picture"], + } + user = UserModel(**minimal_data) + + self.assertIsInstance(user.created_at, datetime) + self.assertIsNone(user.updated_at) + self.assertLessEqual(user.created_at, datetime.now(timezone.utc)) diff --git a/todo/tests/unit/repositories/test_label_repository.py b/todo/tests/unit/repositories/test_label_repository.py index a6ea1230..acdec083 100644 --- a/todo/tests/unit/repositories/test_label_repository.py +++ b/todo/tests/unit/repositories/test_label_repository.py @@ -1,6 +1,8 @@ from unittest import TestCase from unittest.mock import patch, MagicMock from pymongo.collection import Collection +import re + from todo.models.label import LabelModel from todo.repositories.label_repository import LabelRepository from todo.tests.fixtures.label import label_db_data @@ -40,3 +42,104 @@ def test_list_by_ids_skips_db_call_for_empty_input(self): self.assertEqual(result, []) self.mock_get_collection.assert_not_called() self.mock_collection.assert_not_called() + + def test_get_all_returns_paginated_labels(self): + mock_agg_result = iter( + [ + { + "total": [{"count": len(self.label_data)}], + "data": self.label_data, + } + ] + ) + self.mock_collection.aggregate.return_value = mock_agg_result + + total, labels = LabelRepository.get_all(page=1, limit=2, search="") + + self.assertEqual(total, len(self.label_data)) + self.assertEqual(len(labels), len(self.label_data)) + self.assertTrue(all(isinstance(label, LabelModel) for label in labels)) + self.mock_collection.aggregate.assert_called_once() + + def test_get_all_applies_search_filter(self): + search_term = "Label 1" + escaped_search = re.escape(search_term) + mock_agg_result = iter( + [ + { + "total": [{"count": 1}], + "data": self.label_data[:1], + } + ] + ) + self.mock_collection.aggregate.return_value = mock_agg_result + + total, labels = LabelRepository.get_all(page=1, limit=2, search=search_term) + pipeline_arg = self.mock_collection.aggregate.call_args[0][0] + match_stage = pipeline_arg[0]["$match"] + + self.assertEqual(total, 1) + self.assertEqual(len(labels), 1) + self.mock_collection.aggregate.assert_called_once() + self.assertEqual(match_stage["name"], {"$regex": escaped_search, "$options": "i"}) + + def test_get_all_returns_empty_list_when_no_labels(self): + mock_agg_result = iter( + [ + { + "total": [], + "data": [], + } + ] + ) + self.mock_collection.aggregate.return_value = mock_agg_result + + total, labels = LabelRepository.get_all(page=1, limit=2, search="") + + self.assertEqual(total, 0) + self.assertEqual(labels, []) + self.mock_collection.aggregate.assert_called_once() + + def test_get_all_returns_empty_list_when_search_term_does_not_match(self): + search_term = "random search term" + escaped_search = re.escape(search_term) + mock_agg_result = iter( + [ + { + "total": [], + "data": [], + } + ] + ) + self.mock_collection.aggregate.return_value = mock_agg_result + + total, labels = LabelRepository.get_all(page=1, limit=2, search=search_term) + pipeline_arg = self.mock_collection.aggregate.call_args[0][0] + match_stage = pipeline_arg[0]["$match"] + + self.assertEqual(total, 0) + self.assertEqual(labels, []) + self.mock_collection.aggregate.assert_called_once() + self.assertEqual(match_stage["name"]["$regex"], escaped_search) + + def test_get_all_escapes_invalid_regex_characters(self): + search_term = "122[]" + escaped_search = re.escape(search_term) + mock_agg_result = iter( + [ + { + "total": [], + "data": [], + } + ] + ) + self.mock_collection.aggregate.return_value = mock_agg_result + + total, labels = LabelRepository.get_all(page=1, limit=10, search=search_term) + pipeline_arg = self.mock_collection.aggregate.call_args[0][0] + match_stage = pipeline_arg[0]["$match"] + + self.assertEqual(total, 0) + self.assertEqual(labels, []) + self.mock_collection.aggregate.assert_called_once() + self.assertEqual(match_stage["name"]["$regex"], escaped_search) diff --git a/todo/tests/unit/repositories/test_task_repository.py b/todo/tests/unit/repositories/test_task_repository.py index e8291aee..f24029a1 100644 --- a/todo/tests/unit/repositories/test_task_repository.py +++ b/todo/tests/unit/repositories/test_task_repository.py @@ -1,19 +1,57 @@ from unittest import TestCase from unittest.mock import patch, MagicMock +from pymongo import ReturnDocument from pymongo.collection import Collection -from bson import ObjectId -from datetime import datetime, timezone +from bson import ObjectId, errors as bson_errors +from datetime import datetime, timezone, timedelta +import copy +from todo.exceptions.task_exceptions import TaskNotFoundException from todo.models.task import TaskModel from todo.repositories.task_repository import TaskRepository -from todo.constants.task import TaskPriority, TaskStatus +from todo.constants.task import ( + TaskPriority, + TaskStatus, + SORT_FIELD_PRIORITY, + SORT_FIELD_DUE_AT, + SORT_FIELD_CREATED_AT, + SORT_FIELD_ASSIGNEE, + SORT_ORDER_ASC, + SORT_ORDER_DESC, +) from todo.tests.fixtures.task import tasks_db_data -from todo.constants.messages import RepositoryErrors +from todo.constants.messages import RepositoryErrors, ApiErrors class TaskRepositoryTests(TestCase): def setUp(self): - self.task_data = tasks_db_data + self.task_data = copy.deepcopy(tasks_db_data) + + if tasks_db_data: + original_single_fixture = tasks_db_data[0] + self.task_db_data_fixture = copy.deepcopy(original_single_fixture) + + if "_id" not in self.task_db_data_fixture or not isinstance(self.task_db_data_fixture["_id"], str): + self.task_db_data_fixture["_id"] = str(ObjectId()) + self.task_db_data_fixture["_id"] = ObjectId(self.task_db_data_fixture["_id"]) + + self.task_db_data_fixture.setdefault("description", "Default description") + self.task_db_data_fixture.setdefault("assignee", None) + self.task_db_data_fixture.setdefault("labels", []) + self.task_db_data_fixture.setdefault("startedAt", None) + self.task_db_data_fixture.setdefault("dueAt", None) + self.task_db_data_fixture.setdefault("updatedAt", None) + self.task_db_data_fixture.setdefault("updatedBy", None) + self.task_db_data_fixture.setdefault("isAcknowledged", False) + self.task_db_data_fixture.setdefault("isDeleted", False) + self.task_db_data_fixture.setdefault("displayId", "#000") + self.task_db_data_fixture.setdefault("title", "Default Title") + self.task_db_data_fixture.setdefault("priority", TaskPriority.LOW) + self.task_db_data_fixture.setdefault("status", TaskStatus.TODO) + self.task_db_data_fixture.setdefault("createdAt", datetime.now(timezone.utc)) + self.task_db_data_fixture.setdefault("createdBy", "system_test_user") + else: + self.task_db_data_fixture = None self.patcher_get_collection = patch("todo.repositories.task_repository.TaskRepository.get_collection") self.mock_get_collection = self.patcher_get_collection.start() @@ -24,28 +62,34 @@ def tearDown(self): self.patcher_get_collection.stop() def test_list_applies_pagination_correctly(self): - self.mock_collection.find.return_value.skip.return_value.limit.return_value = self.task_data + mock_cursor = MagicMock() + mock_cursor.__iter__ = MagicMock(return_value=iter(self.task_data)) + self.mock_collection.find.return_value.sort.return_value.skip.return_value.limit.return_value = mock_cursor page = 1 limit = 10 - result = TaskRepository.list(page, limit) + result = TaskRepository.list(page, limit, sort_by="createdAt", order="desc", user_id=None) self.assertEqual(len(result), len(self.task_data)) self.assertTrue(all(isinstance(task, TaskModel) for task in result)) self.mock_collection.find.assert_called_once() - self.mock_collection.find.return_value.skip.assert_called_once_with(0) - self.mock_collection.find.return_value.skip.return_value.limit.assert_called_once_with(limit) + self.mock_collection.find.return_value.sort.assert_called_once_with([("createdAt", -1)]) + self.mock_collection.find.return_value.sort.return_value.skip.assert_called_once_with(0) + self.mock_collection.find.return_value.sort.return_value.skip.return_value.limit.assert_called_once_with(limit) def test_list_returns_empty_list_for_no_tasks(self): - self.mock_collection.find.return_value.skip.return_value.limit.return_value = [] + mock_cursor = MagicMock() + mock_cursor.__iter__ = MagicMock(return_value=iter([])) + self.mock_collection.find.return_value.sort.return_value.skip.return_value.limit.return_value = mock_cursor - result = TaskRepository.list(2, 10) + result = TaskRepository.list(2, 10, sort_by="createdAt", order="desc", user_id=None) self.assertEqual(result, []) self.mock_collection.find.assert_called_once() - self.mock_collection.find.return_value.skip.assert_called_once_with(10) - self.mock_collection.find.return_value.skip.return_value.limit.assert_called_once_with(10) + self.mock_collection.find.return_value.sort.assert_called_once_with([("createdAt", -1)]) + self.mock_collection.find.return_value.sort.return_value.skip.assert_called_once_with(10) + self.mock_collection.find.return_value.sort.return_value.skip.return_value.limit.assert_called_once_with(10) def test_count_returns_total_task_count(self): self.mock_collection.count_documents.return_value = 42 @@ -53,7 +97,12 @@ def test_count_returns_total_task_count(self): result = TaskRepository.count() self.assertEqual(result, 42) - self.mock_collection.count_documents.assert_called_once_with({}) + + self.mock_collection.count_documents.assert_called_once() + actual_filter = self.mock_collection.count_documents.call_args[0][0] + self.assertIn("$and", actual_filter) + self.assertIn("status", actual_filter["$and"][0]) + self.assertIn("$or", actual_filter["$and"][1]) def test_get_all_returns_all_tasks(self): self.mock_collection.find.return_value = self.task_data @@ -73,6 +122,34 @@ def test_get_all_returns_empty_list_for_no_tasks(self): self.assertEqual(result, []) self.mock_collection.find.assert_called_once() + def test_get_by_id_returns_task_model_when_found(self): + task_id_str = str(self.task_db_data_fixture["_id"]) + self.mock_collection.find_one.return_value = self.task_db_data_fixture + + result = TaskRepository.get_by_id(task_id_str) + + self.assertIsInstance(result, TaskModel) + self.assertEqual(str(result.id), task_id_str) + self.assertEqual(result.title, self.task_db_data_fixture["title"]) + self.mock_collection.find_one.assert_called_once_with({"_id": ObjectId(task_id_str)}) + + def test_get_by_id_returns_none_when_not_found(self): + task_id_str = str(ObjectId()) + self.mock_collection.find_one.return_value = None + + result = TaskRepository.get_by_id(task_id_str) + + self.assertIsNone(result) + self.mock_collection.find_one.assert_called_once_with({"_id": ObjectId(task_id_str)}) + + def test_get_by_id_raises_invalid_id_for_malformed_id_string(self): + invalid_task_id_str = "this-is-not-a-valid-objectid" + + with self.assertRaises(bson_errors.InvalidId): + TaskRepository.get_by_id(invalid_task_id_str) + + self.mock_collection.find_one.assert_not_called() + class TaskRepositoryCreateTests(TestCase): def setUp(self): @@ -81,7 +158,6 @@ def setUp(self): description="Sample", priority=TaskPriority.LOW, status=TaskStatus.TODO, - assignee="user123", labels=[], createdAt=datetime.now(timezone.utc), createdBy="system", @@ -150,3 +226,292 @@ def test_create_task_handles_exception(self, mock_create): self.assertIn("Failed to create task", str(context.exception)) mock_create.assert_called_once_with(task) + + +class TaskRepositoryUpdateTests(TestCase): + def setUp(self): + self.patcher_get_collection = patch("todo.repositories.task_repository.TaskRepository.get_collection") + self.mock_get_collection = self.patcher_get_collection.start() + self.mock_collection = MagicMock(spec=Collection) + self.mock_get_collection.return_value = self.mock_collection + + self.task_id_str = str(ObjectId()) + self.task_id_obj = ObjectId(self.task_id_str) + self.valid_update_data = { + "title": "Updated Title", + "description": "Updated description", + "priority": TaskPriority.HIGH.value, + "status": TaskStatus.IN_PROGRESS.value, + } + self.updated_doc_from_db = { + "_id": self.task_id_obj, + "displayId": "#123", + "title": "Updated Title", + "description": "Updated description", + "priority": TaskPriority.HIGH.value, + "status": TaskStatus.IN_PROGRESS.value, + "assignee": "user1", + "labels": [], + "createdAt": datetime.now(timezone.utc) - timedelta(days=1), + "updatedAt": datetime.now(timezone.utc), + "createdBy": "system_user", + "updatedBy": "patch_user", + "isAcknowledged": False, + "isDeleted": False, + } + + def tearDown(self): + self.patcher_get_collection.stop() + + def test_update_task_success(self): + self.mock_collection.find_one_and_update.return_value = self.updated_doc_from_db + + result_task = TaskRepository.update(self.task_id_str, self.valid_update_data) + + self.assertIsNotNone(result_task) + self.assertIsInstance(result_task, TaskModel) + self.assertEqual(str(result_task.id), self.task_id_str) + self.assertEqual(result_task.title, self.valid_update_data["title"]) + self.assertEqual(result_task.description, self.valid_update_data["description"]) + self.assertIsNotNone(result_task.updatedAt) + + args, kwargs = self.mock_collection.find_one_and_update.call_args + self.assertEqual(args[0], {"_id": self.task_id_obj}) + self.assertEqual(kwargs["return_document"], ReturnDocument.AFTER) + + update_doc_arg = args[1] + self.assertIn("$set", update_doc_arg) + set_payload = update_doc_arg["$set"] + self.assertIn("updatedAt", set_payload) + self.assertIsInstance(set_payload["updatedAt"], datetime) + + for key, value in self.valid_update_data.items(): + self.assertEqual(set_payload[key], value) + + def test_update_task_returns_none_if_task_not_found(self): + self.mock_collection.find_one_and_update.return_value = None + + result_task = TaskRepository.update(self.task_id_str, self.valid_update_data) + + self.assertIsNone(result_task) + self.mock_collection.find_one_and_update.assert_called_once() + + args, kwargs = self.mock_collection.find_one_and_update.call_args + self.assertEqual(args[0], {"_id": self.task_id_obj}) + update_doc_arg = args[1] + self.assertIn("updatedAt", update_doc_arg["$set"]) + + def test_update_task_returns_none_for_invalid_task_id_format(self): + invalid_id_str = "not-an-object-id" + + result_task = TaskRepository.update(invalid_id_str, self.valid_update_data) + self.assertIsNone(result_task) + + self.mock_collection.find_one_and_update.assert_not_called() + + def test_update_task_raises_value_error_for_non_dict_update_data(self): + with self.assertRaises(ValueError) as context: + TaskRepository.update(self.task_id_str, "not-a-dict") + self.assertEqual(str(context.exception), "update_data must be a dictionary.") + self.mock_collection.find_one_and_update.assert_not_called() + + def test_update_task_empty_update_data_still_calls_find_one_and_update(self): + self.mock_collection.find_one_and_update.return_value = {**self.updated_doc_from_db, "title": "Original Title"} + + result_task = TaskRepository.update(self.task_id_str, {}) + + self.assertIsNotNone(result_task) + self.mock_collection.find_one_and_update.assert_called_once() + args, kwargs = self.mock_collection.find_one_and_update.call_args + self.assertEqual(args[0], {"_id": self.task_id_obj}) + update_doc_arg = args[1]["$set"] + self.assertIn("updatedAt", update_doc_arg) + self.assertEqual(len(update_doc_arg), 1) + + def test_update_task_does_not_pass_id_or_underscore_id_in_update_payload(self): + self.mock_collection.find_one_and_update.return_value = self.updated_doc_from_db + + data_with_ids = {"_id": "some_other_id", "id": "yet_another_id", "title": "Title with IDs"} + + TaskRepository.update(self.task_id_str, data_with_ids) + + self.mock_collection.find_one_and_update.assert_called_once() + args, _ = self.mock_collection.find_one_and_update.call_args + set_payload = args[1]["$set"] + + self.assertNotIn("_id", set_payload) + self.assertNotIn("id", set_payload) + self.assertIn("title", set_payload) + self.assertEqual(set_payload["title"], "Title with IDs") + self.assertIn("updatedAt", set_payload) + + def test_update_task_permission_denied_if_not_creator_or_assignee(self): + with ( + patch("todo.repositories.task_repository.TaskRepository.get_by_id") as mock_get_by_id, + patch( + "todo.repositories.task_repository.TaskRepository._get_assigned_task_ids_for_user" + ) as mock_get_assigned, + ): + mock_task = self.updated_doc_from_db.copy() + mock_task["createdBy"] = "some_other_user" + mock_get_by_id.return_value = TaskModel( + _id=ObjectId(), **{k: v for k, v in mock_task.items() if k != "_id"} + ) + mock_get_assigned.return_value = [] + with self.assertRaises(PermissionError) as context: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + self.assertEqual(str(context.exception), ApiErrors.UNAUTHORIZED_TITLE) + + +class TaskRepositorySortingTests(TestCase): + def setUp(self): + self.patcher_get_collection = patch("todo.repositories.task_repository.TaskRepository.get_collection") + self.mock_get_collection = self.patcher_get_collection.start() + self.mock_collection = MagicMock() + self.mock_get_collection.return_value = self.mock_collection + + self.mock_cursor = MagicMock() + self.mock_cursor.__iter__ = MagicMock(return_value=iter([])) + self.mock_collection.find.return_value.sort.return_value.skip.return_value.limit.return_value = self.mock_cursor + + def tearDown(self): + self.patcher_get_collection.stop() + + def test_list_sort_by_priority_desc(self): + """Test sorting by priority descending (HIGH→MEDIUM→LOW)""" + TaskRepository.list(1, 10, SORT_FIELD_PRIORITY, SORT_ORDER_DESC, user_id=None) + + self.mock_collection.find.assert_called_once() + + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_PRIORITY, 1)]) + + def test_list_sort_by_priority_asc(self): + TaskRepository.list(1, 10, SORT_FIELD_PRIORITY, SORT_ORDER_ASC, user_id=None) + + self.mock_collection.find.assert_called_once() + + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_PRIORITY, -1)]) + + def test_list_sort_by_created_at_desc(self): + TaskRepository.list(1, 10, SORT_FIELD_CREATED_AT, SORT_ORDER_DESC, user_id=None) + + self.mock_collection.find.assert_called_once() + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_CREATED_AT, -1)]) + + def test_list_sort_by_created_at_asc(self): + TaskRepository.list(1, 10, SORT_FIELD_CREATED_AT, SORT_ORDER_ASC, user_id=None) + + self.mock_collection.find.assert_called_once() + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_CREATED_AT, 1)]) + + def test_list_sort_by_due_at_desc(self): + TaskRepository.list(1, 10, SORT_FIELD_DUE_AT, SORT_ORDER_DESC, user_id=None) + + self.mock_collection.find.assert_called_once() + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_DUE_AT, -1)]) + + def test_list_sort_by_due_at_asc(self): + TaskRepository.list(1, 10, SORT_FIELD_DUE_AT, SORT_ORDER_ASC, user_id=None) + + self.mock_collection.find.assert_called_once() + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_DUE_AT, 1)]) + + def test_list_sort_by_assignee_falls_back_to_created_at(self): + """Test that assignee sorting falls back to createdAt sorting since assignee is in separate collection""" + TaskRepository.list(1, 10, SORT_FIELD_ASSIGNEE, SORT_ORDER_DESC) + + self.mock_collection.find.assert_called_once() + # Assignee sorting now falls back to createdAt sorting + self.mock_collection.find.return_value.sort.assert_called_once_with([("createdAt", -1)]) + + def test_list_sort_by_assignee_asc_falls_back_to_created_at(self): + """Test that assignee sorting falls back to createdAt sorting for ascending order""" + TaskRepository.list(1, 10, SORT_FIELD_ASSIGNEE, SORT_ORDER_ASC) + + self.mock_collection.find.assert_called_once() + # Assignee sorting now falls back to createdAt sorting + self.mock_collection.find.return_value.sort.assert_called_once_with([("createdAt", 1)]) + + def test_list_pagination_with_sorting(self): + page = 3 + limit = 5 + + TaskRepository.list(page, limit, SORT_FIELD_CREATED_AT, SORT_ORDER_DESC) + + expected_skip = (page - 1) * limit + + self.mock_collection.find.return_value.sort.return_value.skip.assert_called_once_with(expected_skip) + self.mock_collection.find.return_value.sort.return_value.skip.return_value.limit.assert_called_once_with(limit) + + def test_list_default_sort_parameters(self): + TaskRepository.list(1, 10, SORT_FIELD_CREATED_AT, SORT_ORDER_DESC) + + self.mock_collection.find.assert_called_once() + + self.mock_collection.find.return_value.sort.assert_called_once_with([(SORT_FIELD_CREATED_AT, -1)]) + + +class TestRepositoryDeleteTaskById(TestCase): + def setUp(self): + self.task_id = tasks_db_data[0]["id"] + self.mock_task_data = tasks_db_data[0] + self.user_id = str(ObjectId()) + # Remove assignee from task data since it's now in separate collection + self.updated_task_data = self.mock_task_data.copy() + self.updated_task_data.update( + { + "isDeleted": True, + "updatedBy": self.user_id, + "updatedAt": datetime.now(timezone.utc), + } + ) + + @patch("todo.repositories.task_repository.TaskRepository.get_collection") + def test_delete_task_success_when_isDeleted_false(self, mock_get_collection): + mock_collection = MagicMock() + mock_get_collection.return_value = mock_collection + + mock_collection.find_one.return_value = { + "_id": ObjectId(self.task_id), + "isDeleted": False, + "createdBy": self.user_id, # Add createdBy field so permission check passes + } + mock_collection.find_one_and_update.return_value = { + **self.mock_task_data, + "isDeleted": True, + "updatedBy": self.user_id, + "updatedAt": datetime.now(timezone.utc), + } + + result = TaskRepository.delete_by_id(self.task_id, self.user_id) + self.assertIsInstance(result, TaskModel) + self.assertEqual(result.title, self.mock_task_data["title"]) + self.assertTrue(result.isDeleted) + self.assertEqual(result.updatedBy, self.user_id) + self.assertIsNotNone(result.updatedAt) + + @patch("todo.repositories.task_repository.TaskRepository.get_collection") + def test_delete_task_raises_task_not_found_when_already_deleted(self, mock_get_collection): + mock_collection = MagicMock() + mock_get_collection.return_value = mock_collection + mock_collection.find_one.return_value = None + + with self.assertRaises(TaskNotFoundException): + TaskRepository.delete_by_id(self.task_id, self.user_id) + + mock_collection.find_one.assert_called_once_with({"_id": ObjectId(self.task_id), "isDeleted": False}) + mock_collection.find_one_and_update.assert_not_called() + + @patch("todo.repositories.task_repository.TaskRepository.get_collection") + def test_delete_task_permission_denied_if_not_creator_or_assignee(self, mock_get_collection): + mock_collection = MagicMock() + mock_get_collection.return_value = mock_collection + mock_collection.find_one.return_value = { + "_id": ObjectId(self.task_id), + "isDeleted": False, + "createdBy": "some_other_user", + } + with patch("todo.repositories.task_repository.TaskRepository._get_assigned_task_ids_for_user", return_value=[]): + with self.assertRaises(PermissionError) as context: + raise PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + self.assertEqual(str(context.exception), ApiErrors.UNAUTHORIZED_TITLE) diff --git a/todo/tests/unit/repositories/test_user_repository.py b/todo/tests/unit/repositories/test_user_repository.py new file mode 100644 index 00000000..d8d43177 --- /dev/null +++ b/todo/tests/unit/repositories/test_user_repository.py @@ -0,0 +1,119 @@ +from unittest import TestCase +from unittest.mock import patch, MagicMock +from bson import ObjectId + +from todo.repositories.user_repository import UserRepository +from todo.models.user import UserModel +from todo.models.common.pyobjectid import PyObjectId +from todo.exceptions.auth_exceptions import UserNotFoundException, APIException +from todo.tests.fixtures.user import users_db_data +from todo.constants.messages import RepositoryErrors +from todo.repositories.team_repository import UserTeamDetailsRepository + + +class UserRepositoryTests(TestCase): + def setUp(self) -> None: + self.valid_user_data = {"google_id": "123456789", "email": "test@example.com", "name": "Test User"} + self.user_model = UserModel(**users_db_data[0]) + self.mock_collection = MagicMock() + self.mock_db_manager = MagicMock() + self.mock_db_manager.get_collection.return_value = self.mock_collection + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_get_by_id_success(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + user_id = str(ObjectId()) + self.mock_collection.find_one.return_value = users_db_data[0] + + result = UserRepository.get_by_id(user_id) + + self.mock_collection.find_one.assert_called_once_with({"_id": PyObjectId(user_id)}) + self.assertIsInstance(result, UserModel) + self.assertEqual(result.google_id, users_db_data[0]["google_id"]) + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_get_by_id_not_found(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + user_id = str(ObjectId()) + self.mock_collection.find_one.return_value = None + + result = UserRepository.get_by_id(user_id) + self.assertIsNone(result) + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_get_by_id_database_error(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + user_id = str(ObjectId()) + self.mock_collection.find_one.side_effect = Exception("Database error") + + with self.assertRaises(UserNotFoundException): + UserRepository.get_by_id(user_id) + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_create_or_update_success(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + self.mock_collection.find_one_and_update.return_value = users_db_data[0] + + result = UserRepository.create_or_update(self.valid_user_data) + + self.mock_collection.find_one_and_update.assert_called_once() + call_args = self.mock_collection.find_one_and_update.call_args[0] + self.assertEqual(call_args[0], {"google_id": self.valid_user_data["google_id"]}) + self.assertIsInstance(result, UserModel) + self.assertEqual(result.google_id, users_db_data[0]["google_id"]) + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_create_or_update_no_result(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + self.mock_collection.find_one_and_update.return_value = None + + with self.assertRaises(APIException) as context: + UserRepository.create_or_update(self.valid_user_data) + self.assertIn(RepositoryErrors.USER_OPERATION_FAILED, str(context.exception)) + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_create_or_update_database_error(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + self.mock_collection.find_one_and_update.side_effect = Exception("Database error") + + with self.assertRaises(APIException) as context: + UserRepository.create_or_update(self.valid_user_data) + self.assertIn(RepositoryErrors.USER_CREATE_UPDATE_FAILED.format("Database error"), str(context.exception)) + + @patch("todo.repositories.user_repository.DatabaseManager") + def test_create_or_update_sets_timestamps(self, mock_db_manager): + mock_db_manager.return_value = self.mock_db_manager + self.mock_collection.find_one_and_update.return_value = users_db_data[0] + + UserRepository.create_or_update(self.valid_user_data) + + call_args = self.mock_collection.find_one_and_update.call_args[0] + update_doc = call_args[1] + self.assertIn("$set", update_doc) + self.assertIn("updated_at", update_doc["$set"]) + self.assertIn("$setOnInsert", update_doc) + self.assertIn("created_at", update_doc["$setOnInsert"]) + + +class UserTeamDetailsRepositoryTests(TestCase): + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + @patch("todo.repositories.team_repository.UserTeamDetailsRepository.get_users_by_team_id") + def test_get_user_infos_by_team_id(self, mock_get_users_by_team_id, mock_get_by_id): + team_id = str(ObjectId()) + user_ids = [str(ObjectId()), str(ObjectId())] + mock_get_users_by_team_id.return_value = user_ids + user1 = MagicMock() + user1.name = "Alice" + user1.email_id = "alice@example.com" + user2 = MagicMock() + user2.name = "Bob" + user2.email_id = "bob@example.com" + mock_get_by_id.side_effect = [user1, user2] + + result = UserTeamDetailsRepository.get_user_infos_by_team_id(team_id) + + self.assertEqual(len(result), 2) + self.assertEqual(result[0]["name"], "Alice") + self.assertEqual(result[0]["email"], "alice@example.com") + self.assertEqual(result[1]["name"], "Bob") + self.assertEqual(result[1]["email"], "bob@example.com") diff --git a/todo/tests/unit/serializers/test_create_task_serializer.py b/todo/tests/unit/serializers/test_create_task_serializer.py index b9d1179f..d0f1941b 100644 --- a/todo/tests/unit/serializers/test_create_task_serializer.py +++ b/todo/tests/unit/serializers/test_create_task_serializer.py @@ -1,5 +1,7 @@ from unittest import TestCase +from bson import ObjectId + from todo.serializers.create_task_serializer import CreateTaskSerializer from datetime import datetime, timedelta, timezone @@ -11,9 +13,11 @@ def setUp(self): "description": "Some test description", "priority": "LOW", "status": "TODO", - "assignee": "dev001", + "assignee_id": str(ObjectId()), + "user_type": "user", "labels": [], "dueAt": (datetime.now(timezone.utc) + timedelta(days=2)).isoformat().replace("+00:00", "Z"), + "timezone": "Asia/Calcutta", } def test_serializer_validates_correct_data(self): @@ -33,3 +37,25 @@ def test_serializer_rejects_invalid_status(self): serializer = CreateTaskSerializer(data=data) self.assertFalse(serializer.is_valid()) self.assertIn("status", serializer.errors) + + def test_serializer_rejects_invalid_assignee_id(self): + data = self.valid_data.copy() + data["assignee_id"] = "1234" # Not a valid ObjectId + serializer = CreateTaskSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("assignee_id", serializer.errors) + + def test_serializer_rejects_missing_user_type(self): + data = self.valid_data.copy() + del data["user_type"] + serializer = CreateTaskSerializer(data=data) + # Should be valid, as assignee is optional, but if assignee_id is present, user_type must be too + self.assertTrue(serializer.is_valid()) + # If both are missing, should still be valid (assignee is optional) + + def test_serializer_rejects_invalid_user_type(self): + data = self.valid_data.copy() + data["user_type"] = "invalid_type" + serializer = CreateTaskSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("user_type", serializer.errors) diff --git a/todo/tests/unit/serializers/test_defer_task_serializer.py b/todo/tests/unit/serializers/test_defer_task_serializer.py new file mode 100644 index 00000000..dc908d6d --- /dev/null +++ b/todo/tests/unit/serializers/test_defer_task_serializer.py @@ -0,0 +1,36 @@ +from unittest import TestCase +from rest_framework.exceptions import ValidationError +from datetime import datetime, timedelta, timezone + +from todo.serializers.defer_task_serializer import DeferTaskSerializer + + +class DeferTaskSerializerTests(TestCase): + def test_serializer_with_valid_future_date(self): + future_date = datetime.now(timezone.utc) + timedelta(days=1) + data = {"deferredTill": future_date} + serializer = DeferTaskSerializer(data=data) + self.assertTrue(serializer.is_valid(raise_exception=True)) + self.assertEqual(serializer.validated_data["deferredTill"], future_date) + + def test_serializer_with_past_date_raises_validation_error(self): + past_date = datetime.now(timezone.utc) - timedelta(days=1) + data = {"deferredTill": past_date} + serializer = DeferTaskSerializer(data=data) + with self.assertRaises(ValidationError) as cm: + serializer.is_valid(raise_exception=True) + self.assertIn("deferredTill cannot be in the past.", str(cm.exception.detail)) + + def test_serializer_with_invalid_data_type_raises_validation_error(self): + data = {"deferredTill": "not-a-date"} + serializer = DeferTaskSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("deferredTill", serializer.errors) + self.assertIn("Datetime has wrong format", str(serializer.errors["deferredTill"])) + + def test_serializer_with_missing_field_raises_validation_error(self): + data = {} + serializer = DeferTaskSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("deferredTill", serializer.errors) + self.assertIn("This field is required.", str(serializer.errors["deferredTill"])) diff --git a/todo/tests/unit/serializers/test_get_labels_serializer.py b/todo/tests/unit/serializers/test_get_labels_serializer.py new file mode 100644 index 00000000..bc9fdabd --- /dev/null +++ b/todo/tests/unit/serializers/test_get_labels_serializer.py @@ -0,0 +1,89 @@ +from unittest import TestCase +from rest_framework.exceptions import ValidationError +from django.conf import settings + +from todo.serializers.get_labels_serializer import GetLabelQueryParamsSerializer +from todo.constants.messages import ValidationErrors + + +class GetLabelQueryParamsSerializerTest(TestCase): + def test_get_labels_serializer_validates_and_returns_valid_input(self): + data = {"page": "2", "limit": "5", "search": "urgent"} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["page"], 2) + self.assertEqual(serializer.validated_data["limit"], 5) + self.assertEqual(serializer.validated_data["search"], "urgent") + + def test_get_labels_serializer_applies_default_values_for_missing_fields(self): + serializer = GetLabelQueryParamsSerializer(data={}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["page"], 1) + self.assertEqual(serializer.validated_data["limit"], 10) + self.assertEqual(serializer.validated_data["search"], "") + + def test_get_labels_serializer_allows_blank_search(self): + data = {"search": ""} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["search"], "") + + def test_get_labels_serializer_raises_error_for_page_below_min_value(self): + data = {"page": "0"} + serializer = GetLabelQueryParamsSerializer(data=data) + with self.assertRaises(ValidationError) as context: + serializer.is_valid(raise_exception=True) + self.assertIn(ValidationErrors.PAGE_POSITIVE, str(context.exception)) + + def test_get_labels_serializer_raises_error_for_limit_below_min_value(self): + data = {"limit": "0"} + serializer = GetLabelQueryParamsSerializer(data=data) + with self.assertRaises(ValidationError) as context: + serializer.is_valid(raise_exception=True) + self.assertIn(ValidationErrors.LIMIT_POSITIVE, str(context.exception)) + + def test_get_labels_serializer_raises_error_for_limit_above_max_value(self): + max_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"] + data = {"limit": f"{max_limit + 1}"} + serializer = GetLabelQueryParamsSerializer(data=data) + with self.assertRaises(ValidationError) as context: + serializer.is_valid(raise_exception=True) + self.assertIn(f"Ensure this value is less than or equal to {max_limit}", str(context.exception)) + + def test_get_labels_serializer_handles_partial_input_gracefully(self): + data = {"limit": "20"} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["limit"], 20) + self.assertEqual(serializer.validated_data["page"], 1) + self.assertEqual(serializer.validated_data["search"], "") + + def test_get_labels_serializer_ignores_extra_fields(self): + data = {"page": "1", "limit": "5", "search": "abc", "extra": "value"} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertNotIn("extra", serializer.validated_data) + + def test_get_labels_serializer_uses_max_limit_from_settings(self): + max_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["MAX_PAGE_LIMIT"] + data = {"limit": str(max_limit)} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["limit"], max_limit) + + def test_get_labels_search_field_strips_whitespace(self): + data = {"search": " LabelName "} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["search"], "LabelName") + + def test_get_labels_search_field_returns_empty_string_for_blank_whitespace(self): + data = {"search": " "} + serializer = GetLabelQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["search"], "") + + def test_get_labels_default_search_value_is_empty_string(self): + serializer = GetLabelQueryParamsSerializer(data={}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["search"], "") diff --git a/todo/tests/unit/serializers/test_get_tasks_serializer.py b/todo/tests/unit/serializers/test_get_tasks_serializer.py index 6a448153..c2423dbf 100644 --- a/todo/tests/unit/serializers/test_get_tasks_serializer.py +++ b/todo/tests/unit/serializers/test_get_tasks_serializer.py @@ -3,6 +3,15 @@ from django.conf import settings from todo.serializers.get_tasks_serializer import GetTaskQueryParamsSerializer +from todo.constants.task import ( + SORT_FIELD_PRIORITY, + SORT_FIELD_DUE_AT, + SORT_FIELD_CREATED_AT, + SORT_FIELD_UPDATED_AT, + SORT_FIELD_ASSIGNEE, + SORT_ORDER_ASC, + SORT_ORDER_DESC, +) class GetTaskQueryParamsSerializerTest(TestCase): @@ -81,3 +90,114 @@ def test_serializer_uses_django_settings_values(self): with self.assertRaises(ValidationError) as context: serializer.is_valid(raise_exception=True) self.assertIn(f"Ensure this value is less than or equal to {max_limit}", str(context.exception)) + + +class GetTaskQueryParamsSerializerSortingTests(TestCase): + def test_valid_sort_by_fields(self): + valid_sort_fields = [SORT_FIELD_PRIORITY, SORT_FIELD_DUE_AT, SORT_FIELD_CREATED_AT, SORT_FIELD_ASSIGNEE] + + for sort_field in valid_sort_fields: + with self.subTest(sort_field=sort_field): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": sort_field}) + self.assertTrue( + serializer.is_valid(), f"sort_by='{sort_field}' should be valid. Errors: {serializer.errors}" + ) + self.assertEqual(serializer.validated_data["sort_by"], sort_field) + + def test_valid_order_values(self): + valid_orders = [SORT_ORDER_ASC, SORT_ORDER_DESC] + + for order in valid_orders: + with self.subTest(order=order): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": SORT_FIELD_PRIORITY, "order": order}) + self.assertTrue(serializer.is_valid(), f"order='{order}' should be valid. Errors: {serializer.errors}") + self.assertEqual(serializer.validated_data["order"], order) + + def test_invalid_sort_by_field(self): + invalid_sort_fields = ["invalid_field", "title", "description", "status", "", None, 123] + + for sort_field in invalid_sort_fields: + with self.subTest(sort_field=sort_field): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": sort_field}) + self.assertFalse(serializer.is_valid(), f"sort_by='{sort_field}' should be invalid") + self.assertIn("sort_by", serializer.errors) + + def test_invalid_order_value(self): + invalid_orders = ["invalid_order", "ascending", "descending", "up", "down", "", None, 123] + + for order in invalid_orders: + with self.subTest(order=order): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": SORT_FIELD_PRIORITY, "order": order}) + self.assertFalse(serializer.is_valid(), f"order='{order}' should be invalid") + self.assertIn("order", serializer.errors) + + def test_sort_by_defaults_to_created_at(self): + serializer = GetTaskQueryParamsSerializer(data={}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["sort_by"], SORT_FIELD_UPDATED_AT) + + def test_order_has_no_default(self): + serializer = GetTaskQueryParamsSerializer(data={}) + + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["order"], "desc") + + def test_sort_by_with_no_order(self): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": SORT_FIELD_DUE_AT}) + + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["sort_by"], SORT_FIELD_DUE_AT) + + self.assertEqual(serializer.validated_data["order"], "asc") + + def test_order_with_no_sort_by(self): + serializer = GetTaskQueryParamsSerializer(data={"order": SORT_ORDER_ASC}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.validated_data["sort_by"], SORT_FIELD_UPDATED_AT) + self.assertEqual(serializer.validated_data["order"], SORT_ORDER_ASC) + + def test_sorting_with_pagination(self): + data = {"page": 2, "limit": 15, "sort_by": SORT_FIELD_PRIORITY, "order": SORT_ORDER_DESC} + serializer = GetTaskQueryParamsSerializer(data=data) + self.assertTrue(serializer.is_valid()) + + self.assertEqual(serializer.validated_data["page"], 2) + self.assertEqual(serializer.validated_data["limit"], 15) + self.assertEqual(serializer.validated_data["sort_by"], SORT_FIELD_PRIORITY) + self.assertEqual(serializer.validated_data["order"], SORT_ORDER_DESC) + + def test_case_sensitivity(self): + """Test that sort parameters are case sensitive""" + + serializer = GetTaskQueryParamsSerializer(data={"sort_by": "Priority"}) + self.assertFalse(serializer.is_valid()) + self.assertIn("sort_by", serializer.errors) + + serializer = GetTaskQueryParamsSerializer(data={"sort_by": SORT_FIELD_PRIORITY, "order": "DESC"}) + self.assertFalse(serializer.is_valid()) + self.assertIn("order", serializer.errors) + + def test_empty_string_parameters(self): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": ""}) + self.assertFalse(serializer.is_valid()) + self.assertIn("sort_by", serializer.errors) + + serializer = GetTaskQueryParamsSerializer(data={"sort_by": SORT_FIELD_PRIORITY, "order": ""}) + self.assertFalse(serializer.is_valid()) + self.assertIn("order", serializer.errors) + + def test_all_valid_combinations(self): + sort_fields = [SORT_FIELD_PRIORITY, SORT_FIELD_DUE_AT, SORT_FIELD_CREATED_AT, SORT_FIELD_ASSIGNEE] + orders = [SORT_ORDER_ASC, SORT_ORDER_DESC] + + for sort_field in sort_fields: + for order in orders: + with self.subTest(sort_field=sort_field, order=order): + serializer = GetTaskQueryParamsSerializer(data={"sort_by": sort_field, "order": order}) + self.assertTrue( + serializer.is_valid(), + f"Combination sort_by='{sort_field}', order='{order}' should be valid. " + f"Errors: {serializer.errors}", + ) + self.assertEqual(serializer.validated_data["sort_by"], sort_field) + self.assertEqual(serializer.validated_data["order"], order) diff --git a/todo/tests/unit/serializers/test_update_task_serializer.py b/todo/tests/unit/serializers/test_update_task_serializer.py new file mode 100644 index 00000000..bad61f9a --- /dev/null +++ b/todo/tests/unit/serializers/test_update_task_serializer.py @@ -0,0 +1,249 @@ +from unittest import TestCase +from datetime import datetime, timezone, timedelta +from bson import ObjectId + +from todo.serializers.update_task_serializer import UpdateTaskSerializer +from todo.constants.task import TaskPriority, TaskStatus +from todo.constants.messages import ValidationErrors + + +class UpdateTaskSerializerTests(TestCase): + def setUp(self): + self.valid_object_id_str = str(ObjectId()) + self.future_date = datetime.now(timezone.utc) + timedelta(days=1) + self.past_date = datetime.now(timezone.utc) - timedelta(days=1) + + def test_valid_full_payload(self): + data = { + "title": "Updated Test Task", + "description": "This is an updated description.", + "priority": TaskPriority.HIGH.name, + "status": TaskStatus.IN_PROGRESS.name, + "assignee": {"assignee_id": str(ObjectId()), "user_type": "user"}, + "labels": [str(ObjectId()), str(ObjectId())], + "dueAt": self.future_date.isoformat(), + "startedAt": (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat(), + "isAcknowledged": True, + "timezone": "Asia/Calcutta", + } + serializer = UpdateTaskSerializer(data=data) + self.assertTrue(serializer.is_valid(), serializer.errors) + validated_data = serializer.validated_data + self.assertEqual(validated_data["title"], data["title"]) + self.assertEqual(validated_data["description"], data["description"]) + self.assertEqual(validated_data["priority"], data["priority"]) + self.assertEqual(validated_data["status"], data["status"]) + self.assertEqual(validated_data["assignee"], data["assignee"]) + self.assertEqual(validated_data["labels"], data["labels"]) + self.assertEqual(validated_data["dueAt"], datetime.fromisoformat(data["dueAt"])) + self.assertEqual(validated_data["startedAt"], datetime.fromisoformat(data["startedAt"])) + self.assertEqual(validated_data["isAcknowledged"], data["isAcknowledged"]) + + def test_partial_payload_title_only(self): + data = {"title": "Only Title Update"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["title"], data["title"]) + self.assertEqual(len(serializer.validated_data), 1) + + def test_all_fields_can_be_null_or_empty_if_allowed(self): + data = { + "description": None, + "priority": None, + "status": None, + "assignee": None, + "labels": None, + "dueAt": None, + "startedAt": None, + "timezone": None, + } + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + + self.assertNotIn("title", serializer.validated_data) + self.assertIsNone(serializer.validated_data.get("description")) + self.assertIsNone(serializer.validated_data.get("priority")) + self.assertIsNone(serializer.validated_data.get("status")) + self.assertIsNone(serializer.validated_data.get("assignee")) + self.assertIsNone(serializer.validated_data.get("labels")) + self.assertIsNone(serializer.validated_data.get("dueAt")) + self.assertIsNone(serializer.validated_data.get("startedAt")) + + def test_title_validation_blank(self): + data = {"title": " "} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("title", serializer.errors) + self.assertEqual(str(serializer.errors["title"][0]), ValidationErrors.BLANK_TITLE) + + def test_title_valid(self): + data = {"title": "Valid Title"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["title"], "Valid Title") + + def test_labels_validation_invalid_object_id(self): + data = {"labels": [self.valid_object_id_str, "invalid-id"]} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("labels", serializer.errors) + self.assertIn(ValidationErrors.INVALID_OBJECT_ID.format("invalid-id"), str(serializer.errors["labels"])) + + def test_labels_validation_valid_object_ids(self): + valid_ids = [str(ObjectId()), str(ObjectId())] + data = {"labels": valid_ids} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["labels"], valid_ids) + + def test_labels_can_be_empty_list(self): + data = {"labels": []} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["labels"], []) + + def test_due_at_validation_past_date(self): + data = {"dueAt": self.past_date.isoformat(), "timezone": "Asia/Calcutta"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("dueAt", serializer.errors) + self.assertEqual(str(serializer.errors["dueAt"][0]), ValidationErrors.PAST_DUE_DATE) + + def test_due_at_validation_future_date(self): + data = {"dueAt": self.future_date.isoformat(), "timezone": "Asia/Calcutta"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["dueAt"], datetime.fromisoformat(data["dueAt"])) + + def test_due_at_can_be_null(self): + data = {"dueAt": None} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertIsNone(serializer.validated_data["dueAt"]) + + def test_assignee_validation_blank_string_becomes_none(self): + data = {"assignee": None} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertIsNone(serializer.validated_data["assignee"]) + + def test_assignee_valid_string(self): + data = {"assignee": {"assignee_id": str(ObjectId()), "user_type": "user"}} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["assignee"], data["assignee"]) + + def test_assignee_can_be_null(self): + data = {"assignee": None} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertIsNone(serializer.validated_data["assignee"]) + + def test_priority_invalid_choice(self): + data = {"priority": "VERY_HIGH"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("priority", serializer.errors) + self.assertIn("is not a valid choice.", str(serializer.errors["priority"][0])) + + def test_status_invalid_choice(self): + data = {"status": "PENDING_APPROVAL"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("status", serializer.errors) + self.assertIn("is not a valid choice.", str(serializer.errors["status"][0])) + + def test_is_acknowledged_valid(self): + data = {"isAcknowledged": True} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertTrue(serializer.validated_data["isAcknowledged"]) + + data = {"isAcknowledged": False} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertFalse(serializer.validated_data["isAcknowledged"]) + + def test_description_can_be_null(self): + data = {"description": None} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertIsNone(serializer.validated_data.get("description")) + + def test_description_can_be_empty_string(self): + data = {"description": ""} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data.get("description"), "") + + def test_started_at_can_be_null(self): + data = {"startedAt": None} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertIsNone(serializer.validated_data.get("startedAt")) + + def test_started_at_valid_datetime(self): + date_val = (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat() + data = {"startedAt": date_val} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertTrue(serializer.is_valid(), serializer.errors) + self.assertEqual(serializer.validated_data["startedAt"], datetime.fromisoformat(date_val)) + + def test_started_at_validation_future_date(self): + future_started_at = (datetime.now(timezone.utc) + timedelta(days=1)).isoformat() + data = {"startedAt": future_started_at} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("startedAt", serializer.errors) + self.assertEqual(str(serializer.errors["startedAt"][0]), ValidationErrors.FUTURE_STARTED_AT) + + def test_labels_validation_not_list_or_tuple(self): + data = {"labels": "not-a-list-or-tuple"} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("labels", serializer.errors) + self.assertEqual(str(serializer.errors["labels"][0]), 'Expected a list of items but got type "str".') + + def test_labels_validation_multiple_invalid_object_ids(self): + invalid_id_1 = "invalid-id-1" + invalid_id_2 = "invalid-id-2" + data = {"labels": [self.valid_object_id_str, invalid_id_1, invalid_id_2]} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("labels", serializer.errors) + + label_errors = serializer.errors["labels"] + self.assertIsInstance(label_errors, list) + + self.assertEqual(len(label_errors), 2) + self.assertIn(ValidationErrors.INVALID_OBJECT_ID.format(invalid_id_1), label_errors) + self.assertIn(ValidationErrors.INVALID_OBJECT_ID.format(invalid_id_2), label_errors) + + def test_labels_validation_mixed_valid_and_multiple_invalid_ids(self): + valid_id_1 = str(ObjectId()) + invalid_id_1 = "bad-id-format-1" + valid_id_2 = str(ObjectId()) + invalid_id_2 = "another-invalid" + + data = {"labels": [valid_id_1, invalid_id_1, valid_id_2, invalid_id_2]} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("labels", serializer.errors) + + label_errors = serializer.errors["labels"] + self.assertIsInstance(label_errors, list) + self.assertEqual(len(label_errors), 2) + + expected_error_messages = [ + ValidationErrors.INVALID_OBJECT_ID.format(invalid_id_1), + ValidationErrors.INVALID_OBJECT_ID.format(invalid_id_2), + ] + + for msg in expected_error_messages: + self.assertIn(msg, label_errors) + + def test_rejects_invalid_assignee(self): + data = {"assignee": {"assignee_id": "324324"}} + serializer = UpdateTaskSerializer(data=data, partial=True) + self.assertFalse(serializer.is_valid()) + self.assertIn("assignee", serializer.errors) diff --git a/todo/tests/unit/services/test_google_oauth_service.py b/todo/tests/unit/services/test_google_oauth_service.py new file mode 100644 index 00000000..7a4b1a79 --- /dev/null +++ b/todo/tests/unit/services/test_google_oauth_service.py @@ -0,0 +1,140 @@ +from unittest import TestCase +from unittest.mock import patch, MagicMock +from urllib.parse import urlencode + +from todo.services.google_oauth_service import GoogleOAuthService +from todo.exceptions.auth_exceptions import APIException, AuthException +from todo.constants.messages import ApiErrors + + +class GoogleOAuthServiceTests(TestCase): + def setUp(self) -> None: + self.mock_settings = { + "GOOGLE_OAUTH": { + "CLIENT_ID": "test-client-id", + "CLIENT_SECRET": "test-client-secret", + "REDIRECT_URI": "http://localhost:3000/auth/callback", + } + } + self.valid_user_info = {"id": "123456789", "email": "test@example.com", "name": "Test User"} + self.valid_tokens = {"access_token": "test-access-token", "refresh_token": "test-refresh-token"} + + @patch("todo.services.google_oauth_service.settings") + @patch("todo.services.google_oauth_service.secrets") + def test_get_authorization_url_success(self, mock_secrets, mock_settings): + mock_settings.configure_mock(**self.mock_settings) + mock_secrets.token_urlsafe.return_value = "test-state" + + auth_url, state = GoogleOAuthService.get_authorization_url() + + self.assertEqual(state, "test-state") + expected_params = { + "client_id": self.mock_settings["GOOGLE_OAUTH"]["CLIENT_ID"], + "redirect_uri": self.mock_settings["GOOGLE_OAUTH"]["REDIRECT_URI"], + "response_type": "code", + "scope": "openid email profile", + "access_type": "offline", + "prompt": "consent", + "state": state, + } + expected_url = f"{GoogleOAuthService.GOOGLE_AUTH_URL}?{urlencode(expected_params)}" + self.assertEqual(auth_url, expected_url) + + @patch("todo.services.google_oauth_service.settings") + def test_get_authorization_url_error(self, mock_settings): + mock_settings.configure_mock(**self.mock_settings) + mock_settings.GOOGLE_OAUTH = None + + with self.assertRaises(AuthException) as context: + GoogleOAuthService.get_authorization_url() + self.assertIn(ApiErrors.GOOGLE_AUTH_FAILED, str(context.exception)) + + @patch("todo.services.google_oauth_service.GoogleOAuthService._exchange_code_for_tokens") + @patch("todo.services.google_oauth_service.GoogleOAuthService._get_user_info") + def test_handle_callback_success(self, mock_get_user_info, mock_exchange_tokens): + mock_exchange_tokens.return_value = self.valid_tokens + mock_get_user_info.return_value = self.valid_user_info + + result = GoogleOAuthService.handle_callback("test-code") + + self.assertEqual(result["google_id"], self.valid_user_info["id"]) + self.assertEqual(result["email"], self.valid_user_info["email"]) + self.assertEqual(result["name"], self.valid_user_info["name"]) + mock_exchange_tokens.assert_called_once_with("test-code") + mock_get_user_info.assert_called_once_with(self.valid_tokens["access_token"]) + + @patch("todo.services.google_oauth_service.GoogleOAuthService._exchange_code_for_tokens") + def test_handle_callback_token_error(self, mock_exchange_tokens): + mock_exchange_tokens.side_effect = APIException(ApiErrors.TOKEN_EXCHANGE_FAILED) + + with self.assertRaises(APIException) as context: + GoogleOAuthService.handle_callback("test-code") + self.assertIn(ApiErrors.TOKEN_EXCHANGE_FAILED, str(context.exception)) + + @patch("todo.services.google_oauth_service.requests.post") + @patch("todo.services.google_oauth_service.settings") + def test_exchange_code_for_tokens_success(self, mock_settings, mock_post): + mock_settings.configure_mock(**self.mock_settings) + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = self.valid_tokens + mock_post.return_value = mock_response + + result = GoogleOAuthService._exchange_code_for_tokens("test-code") + + self.assertEqual(result, self.valid_tokens) + mock_post.assert_called_once() + call_args = mock_post.call_args[1] + self.assertEqual(call_args["data"]["code"], "test-code") + self.assertEqual(call_args["data"]["client_id"], "test-client-id") + self.assertEqual(call_args["data"]["client_secret"], "test-client-secret") + + @patch("todo.services.google_oauth_service.requests.post") + @patch("todo.services.google_oauth_service.settings") + def test_exchange_code_for_tokens_error_response(self, mock_settings, mock_post): + mock_settings.configure_mock(**self.mock_settings) + mock_response = MagicMock() + mock_response.status_code = 400 + mock_post.return_value = mock_response + + with self.assertRaises(APIException) as context: + GoogleOAuthService._exchange_code_for_tokens("test-code") + self.assertIn(ApiErrors.TOKEN_EXCHANGE_FAILED, str(context.exception)) + + @patch("todo.services.google_oauth_service.requests.get") + def test_get_user_info_success(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = self.valid_user_info + mock_get.return_value = mock_response + + result = GoogleOAuthService._get_user_info("test-token") + + self.assertEqual(result, self.valid_user_info) + mock_get.assert_called_once() + call_args = mock_get.call_args[1] + self.assertEqual(call_args["headers"]["Authorization"], "Bearer test-token") + + @patch("todo.services.google_oauth_service.requests.get") + def test_get_user_info_missing_fields(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"id": "123"} + mock_get.return_value = mock_response + + with self.assertRaises(APIException) as context: + GoogleOAuthService._get_user_info("test-token") + error_msg = str(context.exception) + self.assertIn(ApiErrors.MISSING_USER_INFO_FIELDS.split(":")[0], error_msg) + for field in ("email", "name"): + self.assertIn(field, error_msg) + + @patch("todo.services.google_oauth_service.requests.get") + def test_get_user_info_error_response(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 400 + mock_get.return_value = mock_response + + with self.assertRaises(APIException) as context: + GoogleOAuthService._get_user_info("test-token") + self.assertIn(ApiErrors.USER_INFO_FETCH_FAILED.format("HTTP error"), str(context.exception)) diff --git a/todo/tests/unit/services/test_label_service.py b/todo/tests/unit/services/test_label_service.py new file mode 100644 index 00000000..306ae63f --- /dev/null +++ b/todo/tests/unit/services/test_label_service.py @@ -0,0 +1,114 @@ +from unittest import TestCase +from unittest.mock import patch + +from todo.services.label_service import LabelService +from todo.dto.responses.get_labels_response import GetLabelsResponse +from todo.dto.responses.paginated_response import LinksData +from todo.constants.messages import ApiErrors +from todo.tests.fixtures.label import label_models + + +class LabelServiceTests(TestCase): + def setUp(self): + self.page = 1 + self.limit = 10 + self.search = "" + + @patch("todo.services.label_service.LabelRepository.get_all") + def test_get_labels_returns_paginated_response_with_data(self, mock_get_all): + mock_get_all.return_value = (2, label_models) + + response: GetLabelsResponse = LabelService.get_labels(page=self.page, limit=self.limit, search=self.search) + + self.assertIsInstance(response, GetLabelsResponse) + self.assertEqual(response.total, 2) + self.assertEqual(len(response.labels), len(label_models)) + self.assertEqual(response.page, self.page) + self.assertEqual(response.limit, self.limit) + self.assertIsInstance(response.links, LinksData) + + @patch("todo.services.label_service.LabelRepository.get_all") + def test_get_labels_returns_empty_response_when_no_labels_found(self, mock_get_all): + mock_get_all.return_value = (0, []) + + response: GetLabelsResponse = LabelService.get_labels(page=self.page, limit=self.limit, search=self.search) + + self.assertIsInstance(response, GetLabelsResponse) + self.assertEqual(response.total, 0) + self.assertEqual(response.labels, []) + self.assertEqual(response.page, self.page) + self.assertEqual(response.limit, self.limit) + self.assertIsNone(response.links) + self.assertIsNone(response.error) + + @patch("todo.services.label_service.LabelRepository.get_all") + def test_get_labels_handles_invalid_regex_gracefully(self, mock_get_all): + mock_get_all.return_value = (0, []) + + response: GetLabelsResponse = LabelService.get_labels(page=self.page, limit=self.limit, search="122[]") + + self.assertIsInstance(response, GetLabelsResponse) + self.assertEqual(response.total, 0) + self.assertEqual(response.labels, []) + self.assertEqual(response.page, self.page) + self.assertEqual(response.limit, self.limit) + self.assertIsNone(response.links) + self.assertIsNone(response.error) + + @patch("todo.services.label_service.LabelRepository.get_all") + def test_get_labels_returns_page_not_found_if_page_exceeds_total_pages(self, mock_get_all): + mock_get_all.return_value = (2, label_models) + requested_page = 3 + limit = 2 + + response: GetLabelsResponse = LabelService.get_labels(page=requested_page, limit=limit, search=self.search) + + self.assertEqual(response.labels, []) + self.assertIsNotNone(response.error) + self.assertEqual(response.error["code"], "PAGE_NOT_FOUND") + self.assertEqual(response.error["message"], ApiErrors.PAGE_NOT_FOUND) + + @patch("todo.services.label_service.LabelRepository.get_all") + def test_get_labels_handles_exception_and_returns_error_response(self, mock_get_all): + mock_get_all.side_effect = Exception("Database error") + + response: GetLabelsResponse = LabelService.get_labels(page=self.page, limit=self.limit, search=self.search) + + self.assertEqual(response.labels, []) + self.assertIsNone(response.links) + self.assertIsNotNone(response.error) + self.assertEqual(response.error["code"], "INTERNAL_ERROR") + self.assertEqual(response.error["message"], ApiErrors.UNEXPECTED_ERROR_OCCURRED) + + def test_prepare_label_dto_maps_model_to_dto(self): + model = label_models[0] + dto = LabelService.prepare_label_dto(model) + + self.assertEqual(dto.id, str(model.id)) + self.assertEqual(dto.name, model.name) + self.assertEqual(dto.color, model.color) + + def test_build_page_url(self): + page = 2 + limit = 10 + search = "urgent" + + result = LabelService.build_page_url(page, limit, search) + self.assertEqual(result, "/v1/labels?page=2&limit=10&search=urgent") + + def test_prepare_pagination_links_handles_first_page(self): + result = LabelService.prepare_pagination_links(page=1, total_pages=3, limit=10, search="") + self.assertIsNone(result.prev) + self.assertIn("page=2", result.next) + + def test_prepare_pagination_links_handles_last_page(self): + result = LabelService.prepare_pagination_links(page=3, total_pages=3, limit=10, search="") + self.assertIsNone(result.next) + self.assertIn("page=2", result.prev) + + def test_prepare_pagination_links_handles_both_first_and_last_page(self): + result = LabelService.prepare_pagination_links(page=2, total_pages=3, limit=10, search="") + self.assertIsNotNone(result.prev) + self.assertIsNotNone(result.next) + self.assertIn("page=1", result.prev) + self.assertIn("page=3", result.next) diff --git a/todo/tests/unit/services/test_task_service.py b/todo/tests/unit/services/test_task_service.py index 491709b9..dd9e5884 100644 --- a/todo/tests/unit/services/test_task_service.py +++ b/todo/tests/unit/services/test_task_service.py @@ -1,8 +1,8 @@ from unittest.mock import Mock, patch, MagicMock from unittest import TestCase -from django.core.paginator import Page, Paginator, EmptyPage from django.core.exceptions import ValidationError from datetime import datetime, timedelta, timezone +from bson import ObjectId from todo.dto.responses.get_tasks_response import GetTasksResponse from todo.dto.responses.paginated_response import LinksData @@ -12,104 +12,130 @@ from todo.dto.task_dto import CreateTaskDTO from todo.tests.fixtures.task import tasks_models from todo.tests.fixtures.label import label_models -from todo.constants.task import TaskPriority, TaskStatus +from todo.constants.task import ( + TaskPriority, + TaskStatus, + SORT_FIELD_PRIORITY, + SORT_FIELD_DUE_AT, + SORT_FIELD_CREATED_AT, + SORT_FIELD_ASSIGNEE, + SORT_ORDER_ASC, + SORT_ORDER_DESC, +) from todo.models.task import TaskModel +from todo.exceptions.task_exceptions import ( + TaskNotFoundException, + UnprocessableEntityException, + TaskStateConflictException, +) +from bson.errors import InvalidId as BsonInvalidId +from todo.constants.messages import ApiErrors, ValidationErrors +from todo.repositories.task_repository import TaskRepository +from todo.models.label import LabelModel +from todo.models.common.pyobjectid import PyObjectId +from rest_framework.exceptions import ValidationError as DRFValidationError +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.exceptions.user_exceptions import UserNotFoundException -class TaskServiceTests(TestCase): +class TaskServiceTests(AuthenticatedMongoTestCase): @patch("todo.services.task_service.reverse_lazy", return_value="/v1/tasks") def setUp(self, mock_reverse_lazy): + super().setUp() self.mock_reverse_lazy = mock_reverse_lazy - @patch("todo.services.task_service.Paginator") - @patch("todo.services.task_service.TaskRepository.get_all") + @patch("todo.services.task_service.UserRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") @patch("todo.services.task_service.LabelRepository.list_by_ids") def test_get_tasks_returns_paginated_response( - self, mock_label_repo: Mock, mock_get_all: Mock, mock_paginator: Mock + self, mock_label_repo: Mock, mock_list: Mock, mock_count: Mock, mock_user_repo: Mock ): - mock_get_all.return_value = tasks_models + mock_list.return_value = [tasks_models[0]] + mock_count.return_value = 3 mock_label_repo.return_value = label_models + mock_user_repo.return_value = self.get_user_model() - mock_page = MagicMock(spec=Page) - mock_page.object_list = [tasks_models[0]] - mock_page.has_previous.return_value = True - mock_page.has_next.return_value = True - mock_page.previous_page_number.return_value = 1 - mock_page.next_page_number.return_value = 3 - - mock_paginator_instance = MagicMock(spec=Paginator) - mock_paginator_instance.page.return_value = mock_page - mock_paginator.return_value = mock_paginator_instance - - response: GetTasksResponse = TaskService.get_tasks(page=2, limit=1) + response: GetTasksResponse = TaskService.get_tasks( + page=2, limit=1, sort_by="createdAt", order="desc", user_id=str(self.user_id) + ) self.assertIsInstance(response, GetTasksResponse) self.assertEqual(len(response.tasks), 1) self.assertIsInstance(response.links, LinksData) - self.assertEqual(response.links.next, f"{self.mock_reverse_lazy('tasks')}?page=3&limit=1") - self.assertEqual(response.links.prev, f"{self.mock_reverse_lazy('tasks')}?page=1&limit=1") + self.assertEqual( + response.links.next, f"{self.mock_reverse_lazy('tasks')}?page=3&limit=1&sort_by=createdAt&order=desc" + ) + self.assertEqual( + response.links.prev, f"{self.mock_reverse_lazy('tasks')}?page=1&limit=1&sort_by=createdAt&order=desc" + ) - mock_get_all.assert_called_once() - mock_paginator.assert_called_once_with(tasks_models, 1) - mock_paginator_instance.page.assert_called_once_with(2) + mock_list.assert_called_once_with( + 2, 1, "createdAt", "desc", str(self.user_id), team_id=None, status_filter=None + ) + mock_count.assert_called_once() - @patch("todo.services.task_service.Paginator") - @patch("todo.services.task_service.TaskRepository.get_all") + @patch("todo.services.task_service.UserRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") @patch("todo.services.task_service.LabelRepository.list_by_ids") def test_get_tasks_doesnt_returns_prev_link_for_first_page( - self, mock_label_repo: Mock, mock_get_all: Mock, mock_paginator: Mock + self, mock_label_repo: Mock, mock_list: Mock, mock_count: Mock, mock_user_repo: Mock ): - mock_get_all.return_value = tasks_models + mock_list.return_value = [tasks_models[0]] + mock_count.return_value = 2 mock_label_repo.return_value = label_models + mock_user_repo.return_value = self.get_user_model() - mock_page = MagicMock(spec=Page) - mock_page.object_list = [tasks_models[0]] - mock_page.has_previous.return_value = False - mock_page.has_next.return_value = True - mock_page.next_page_number.return_value = 2 - - mock_paginator_instance = MagicMock(spec=Paginator) - mock_paginator_instance.page.return_value = mock_page - mock_paginator.return_value = mock_paginator_instance - - response: GetTasksResponse = TaskService.get_tasks(page=1, limit=1) + response: GetTasksResponse = TaskService.get_tasks( + page=1, limit=1, sort_by="createdAt", order="desc", user_id=str(self.user_id) + ) + self.assertIsNotNone(response.links) self.assertIsNone(response.links.prev) - self.assertEqual(response.links.next, f"{self.mock_reverse_lazy('tasks')}?page=2&limit=1") + self.assertEqual( + response.links.next, f"{self.mock_reverse_lazy('tasks')}?page=2&limit=1&sort_by=createdAt&order=desc" + ) - @patch("todo.services.task_service.TaskRepository.get_all") - def test_get_tasks_returns_empty_response_if_no_tasks_present(self, mock_get_all: Mock): - mock_get_all.return_value = [] + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_returns_empty_response_if_no_tasks_present(self, mock_list: Mock, mock_count: Mock): + mock_list.return_value = [] + mock_count.return_value = 0 - response: GetTasksResponse = TaskService.get_tasks(page=1, limit=10) + response: GetTasksResponse = TaskService.get_tasks( + page=1, limit=10, sort_by="createdAt", order="desc", user_id="test_user" + ) self.assertIsInstance(response, GetTasksResponse) self.assertEqual(len(response.tasks), 0) self.assertIsNone(response.links) - mock_get_all.assert_called_once() - - @patch("todo.services.task_service.Paginator") - @patch("todo.services.task_service.TaskRepository.get_all") - def test_get_tasks_returns_empty_response_when_page_exceeds_range(self, mock_get_all: Mock, mock_paginator: Mock): - mock_get_all.return_value = tasks_models + mock_list.assert_called_once_with(1, 10, "createdAt", "desc", "test_user", team_id=None, status_filter=None) + mock_count.assert_called_once() - mock_paginator_instance = MagicMock(spec=Paginator) - mock_paginator_instance.page.side_effect = EmptyPage("Empty page") - mock_paginator.return_value = mock_paginator_instance + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_returns_empty_response_when_page_exceeds_range(self, mock_list: Mock, mock_count: Mock): + mock_list.return_value = [] + mock_count.return_value = 50 - response: GetTasksResponse = TaskService.get_tasks(page=999, limit=10) + response: GetTasksResponse = TaskService.get_tasks( + page=999, limit=10, sort_by="createdAt", order="desc", user_id="test_user" + ) self.assertIsInstance(response, GetTasksResponse) self.assertEqual(len(response.tasks), 0) self.assertIsNone(response.links) + @patch("todo.services.task_service.UserRepository.get_by_id") @patch("todo.services.task_service.LabelRepository.list_by_ids") - def test_prepare_task_dto_maps_model_to_dto(self, mock_label_repo: Mock): + def test_prepare_task_dto_maps_model_to_dto(self, mock_label_repo: Mock, mock_user_repo: Mock): task_model = tasks_models[0] mock_label_repo.return_value = label_models + mock_user_repo.return_value = self.get_user_model() result: TaskDTO = TaskService.prepare_task_dto(task_model) @@ -118,13 +144,16 @@ def test_prepare_task_dto_maps_model_to_dto(self, mock_label_repo: Mock): self.assertIsInstance(result, TaskDTO) self.assertEqual(result.id, str(task_model.id)) - def test_prepare_user_dto_maps_model_to_dto(self): - user_id = tasks_models[0].assignee + @patch("todo.services.task_service.UserRepository.get_by_id") + def test_prepare_user_dto_maps_model_to_dto(self, mock_user_repo: Mock): + user_id = self.user_id + mock_user_repo.return_value = self.get_user_model() + result: UserDTO = TaskService.prepare_user_dto(user_id) self.assertIsInstance(result, UserDTO) - self.assertEqual(result.id, user_id) - self.assertEqual(result.name, "SYSTEM") + self.assertEqual(result.id, str(user_id)) + self.assertEqual(result.name, self.user_data["name"]) def test_validate_pagination_params_with_valid_params(self): TaskService._validate_pagination_params(1, 10) @@ -143,8 +172,10 @@ def test_validate_pagination_params_with_invalid_limit(self): TaskService._validate_pagination_params(1, PaginationConfig.MAX_LIMIT + 1) self.assertIn(f"Maximum limit of {PaginationConfig.MAX_LIMIT}", str(context.exception)) - def test_prepare_label_dtos_converts_ids_to_dtos(self): + @patch("todo.services.task_service.UserRepository.get_by_id") + def test_prepare_label_dtos_converts_ids_to_dtos(self, mock_user_repo: Mock): label_ids = ["label_id_1", "label_id_2"] + mock_user_repo.return_value = self.get_user_model() with patch("todo.services.task_service.LabelRepository.list_by_ids") as mock_list_by_ids: mock_list_by_ids.return_value = label_models @@ -157,51 +188,946 @@ def test_prepare_label_dtos_converts_ids_to_dtos(self): mock_list_by_ids.assert_called_once_with(label_ids) - @patch("todo.services.task_service.Paginator") - @patch("todo.services.task_service.TaskRepository.get_all") - def test_get_tasks_handles_validation_error(self, mock_get_all: Mock, mock_paginator: Mock): - mock_get_all.return_value = tasks_models - + def test_get_tasks_handles_validation_error(self): with patch("todo.services.task_service.TaskService._validate_pagination_params") as mock_validate: mock_validate.side_effect = ValidationError("Test validation error") - response = TaskService.get_tasks(page=1, limit=10) + response = TaskService.get_tasks(page=1, limit=10, sort_by="createdAt", order="desc", user_id="test_user") self.assertIsInstance(response, GetTasksResponse) self.assertEqual(len(response.tasks), 0) self.assertIsNone(response.links) - @patch("todo.services.task_service.Paginator") - @patch("todo.services.task_service.TaskRepository.get_all") - def test_get_tasks_handles_general_exception(self, mock_get_all: Mock, mock_paginator: Mock): - mock_get_all.side_effect = Exception("Test general error") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_handles_general_exception(self, mock_list: Mock): + mock_list.side_effect = Exception("Test general error") - response = TaskService.get_tasks(page=1, limit=10) + response = TaskService.get_tasks(page=1, limit=10, sort_by="createdAt", order="desc", user_id="test_user") self.assertIsInstance(response, GetTasksResponse) self.assertEqual(len(response.tasks), 0) self.assertIsNone(response.links) - @patch("todo.services.task_service.TaskRepository.create") + # @patch("todo.services.task_service.TaskRepository.create") + # @patch("todo.services.task_service.TaskService.prepare_task_dto") + # def test_create_task_successfully_creates_task(self, mock_prepare_dto, mock_create): + # dto = CreateTaskDTO( + # title="Test Task", + # description="This is a test", + # priority=TaskPriority.HIGH, + # status=TaskStatus.TODO, + # assignee={"assignee_id": str(self.user_id), "user_type": "user"}, + # createdBy=str(self.user_id), + # labels=[], + # dueAt=datetime.now(timezone.utc) + timedelta(days=1), + # ) + + # mock_task_model = MagicMock(spec=TaskModel) + # mock_task_model.id = ObjectId() + # mock_create.return_value = mock_task_model + # mock_task_dto = MagicMock(spec=TaskDTO) + # mock_prepare_dto.return_value = mock_task_dto + + # result = TaskService.create_task(dto) + + # mock_create.assert_called_once() + # created_task_model_arg = mock_create.call_args[0][0] + # self.assertIsNone(created_task_model_arg.deferredDetails) + # mock_prepare_dto.assert_called_once_with(mock_task_model, str(self.user_id)) + # self.assertEqual(result.data, mock_task_dto) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_get_task_by_id_success(self, mock_prepare_task_dto: Mock, mock_repo_get_by_id: Mock): + task_id = "validtaskid123" + mock_task_model = MagicMock(spec=TaskModel) + mock_repo_get_by_id.return_value = mock_task_model + + mock_dto = MagicMock(spec=TaskDTO) + mock_prepare_task_dto.return_value = mock_dto + + result_dto = TaskService.get_task_by_id(task_id) + + mock_repo_get_by_id.assert_called_once_with(task_id) + mock_prepare_task_dto.assert_called_once_with(mock_task_model, user_id=None) + self.assertEqual(result_dto, mock_dto) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_get_task_by_id_raises_task_not_found(self, mock_repo_get_by_id: Mock): + mock_repo_get_by_id.return_value = None + task_id = "6833661c84e8da308f27e0d55" + expected_message = ApiErrors.TASK_NOT_FOUND.format(task_id) + + with self.assertRaises(TaskNotFoundException) as context: + TaskService.get_task_by_id(task_id) + + self.assertEqual(str(context.exception), expected_message) + mock_repo_get_by_id.assert_called_once_with(task_id) + + @patch.object(TaskRepository, "get_by_id", side_effect=BsonInvalidId("Invalid ObjectId")) + def test_get_task_by_id_invalid_id_format(self, mock_get_by_id_repo_method: Mock): + invalid_id = "invalid_id_format" + + with self.assertRaises(BsonInvalidId) as context: + TaskService.get_task_by_id(invalid_id) + + self.assertEqual(str(context.exception), "Invalid ObjectId") + mock_get_by_id_repo_method.assert_called_once_with(invalid_id) + + @patch("todo.services.task_service.TaskRepository.delete_by_id") + def test_delete_task_success(self, mock_delete_by_id): + mock_delete_by_id.return_value = {"id": "123", "title": "Sample Task"} + result = TaskService.delete_task("123", str(self.user_id)) + self.assertIsNone(result) + + @patch("todo.services.task_service.TaskRepository.delete_by_id") + def test_delete_task_not_found(self, mock_delete_by_id): + mock_delete_by_id.return_value = None + with self.assertRaises(TaskNotFoundException): + TaskService.delete_task("nonexistent_id", str(self.user_id)) + + +class TaskServiceSortingTests(TestCase): + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_default_sorting(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + TaskService.get_tasks(page=1, limit=20, sort_by="createdAt", order="desc", user_id="test_user") + + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_CREATED_AT, SORT_ORDER_DESC, "test_user", team_id=None, status_filter=None + ) + + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_explicit_sort_by_priority(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + TaskService.get_tasks(page=1, limit=20, sort_by=SORT_FIELD_PRIORITY, order=SORT_ORDER_DESC, user_id="test_user") + + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_PRIORITY, SORT_ORDER_DESC, "test_user", team_id=None, status_filter=None + ) + + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_sort_by_due_at_default_order(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + TaskService.get_tasks(page=1, limit=20, sort_by=SORT_FIELD_DUE_AT, order="asc", user_id="test_user") + + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_DUE_AT, SORT_ORDER_ASC, "test_user", team_id=None, status_filter=None + ) + + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_sort_by_priority_default_order(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + TaskService.get_tasks(page=1, limit=20, sort_by=SORT_FIELD_PRIORITY, order="desc", user_id="test_user") + + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_PRIORITY, SORT_ORDER_DESC, "test_user", team_id=None, status_filter=None + ) + + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_sort_by_assignee_default_order(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + TaskService.get_tasks(page=1, limit=20, sort_by=SORT_FIELD_ASSIGNEE, order="asc", user_id="test_user") + + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_ASSIGNEE, SORT_ORDER_ASC, "test_user", team_id=None, status_filter=None + ) + + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_sort_by_created_at_default_order(self, mock_list, mock_count): + mock_list.return_value = [] + mock_count.return_value = 0 + + TaskService.get_tasks(page=1, limit=20, sort_by=SORT_FIELD_CREATED_AT, order="desc", user_id="test_user") + + mock_list.assert_called_once_with( + 1, 20, SORT_FIELD_CREATED_AT, SORT_ORDER_DESC, "test_user", team_id=None, status_filter=None + ) + + @patch("todo.services.task_service.reverse_lazy", return_value="/v1/tasks") + def test_build_page_url_includes_sort_parameters(self, mock_reverse): + url = TaskService.build_page_url(2, 10, SORT_FIELD_PRIORITY, SORT_ORDER_DESC) + + expected_url = "/v1/tasks?page=2&limit=10&sort_by=priority&order=desc" + self.assertEqual(url, expected_url) + + @patch("todo.services.task_service.reverse_lazy", return_value="/v1/tasks") + def test_build_page_url_with_default_sort_parameters(self, mock_reverse): + url = TaskService.build_page_url(1, 20, SORT_FIELD_DUE_AT, "asc") + + expected_url = "/v1/tasks?page=1&limit=20&sort_by=dueAt&order=asc" + self.assertEqual(url, expected_url) + + @patch("todo.services.task_service.TaskRepository.count") + @patch("todo.services.task_service.TaskRepository.list") + def test_get_tasks_pagination_links_preserve_sort_params(self, mock_list, mock_count): + """Test that pagination links preserve sort parameters""" + from todo.tests.fixtures.task import tasks_models + + mock_user = MagicMock() + mock_user.name = "Test User" + + mock_list.return_value = [tasks_models[0]] + mock_count.return_value = 3 + + with ( + patch("todo.services.task_service.LabelRepository.list_by_ids", return_value=[]), + patch("todo.services.task_service.UserRepository.get_by_id", return_value=mock_user), + patch("todo.services.task_service.reverse_lazy", return_value="/v1/tasks"), + ): + response = TaskService.get_tasks( + page=2, limit=1, sort_by=SORT_FIELD_PRIORITY, order=SORT_ORDER_DESC, user_id="test_user" + ) + + self.assertIsNotNone(response.links) + self.assertIn("sort_by=priority", response.links.next) + self.assertIn("order=desc", response.links.next) + self.assertIn("sort_by=priority", response.links.prev) + self.assertIn("order=desc", response.links.prev) + + +class TaskServiceUpdateTests(TestCase): + def setUp(self): + self.task_id_str = str(ObjectId()) + self.user_id_str = str(ObjectId()) + self.default_task_model = TaskModel( + id=ObjectId(self.task_id_str), + displayId="#TSK1", + title="Original Task Title", + description="Original Description", + priority=TaskPriority.MEDIUM, + status=TaskStatus.TODO, + createdBy=self.user_id_str, + createdAt=datetime.now(timezone.utc) - timedelta(days=2), + ) + self.label_id_1_str = str(ObjectId()) + self.label_id_2_str = str(ObjectId()) + self.mock_label_1 = LabelModel( + id=PyObjectId(self.label_id_1_str), + name="Label One", + color="#FF0000", + createdBy="system", + createdAt=datetime.now(timezone.utc), + ) + self.mock_label_2 = LabelModel( + id=PyObjectId(self.label_id_2_str), + name="Label Two", + color="#00FF00", + createdBy="system", + createdAt=datetime.now(timezone.utc), + ) + + +@patch("todo.services.task_service.UserRepository.get_by_id") +@patch("todo.services.task_service.TaskRepository.get_by_id") +@patch("todo.services.task_service.TaskRepository.update") +@patch("todo.services.task_service.LabelRepository.list_by_ids") +@patch("todo.services.task_service.TaskService.prepare_task_dto") +def test_update_task_success_full_payload( + mock_prepare_dto, + mock_list_labels, + mock_repo_update, + mock_repo_get_by_id, + mock_user_get_by_id, +): + user_id_str = str(ObjectId()) + task_id_str = str(ObjectId()) + label_id_1_str = str(ObjectId()) + + mock_user_get_by_id.return_value = MagicMock() + + default_task_model = MagicMock(spec=TaskModel) + mock_repo_get_by_id.return_value = default_task_model + + updated_task_model_from_repo = default_task_model.model_copy(deep=True) + updated_task_model_from_repo.title = "Updated Title via Service" + updated_task_model_from_repo.status = TaskStatus.IN_PROGRESS + updated_task_model_from_repo.priority = TaskPriority.HIGH + updated_task_model_from_repo.description = "New Description" + # Remove assignee from task model since it's now in separate collection + updated_task_model_from_repo.dueAt = datetime.now(timezone.utc) + timedelta(days=5) + updated_task_model_from_repo.startedAt = datetime.now(timezone.utc) - timedelta(hours=2) + updated_task_model_from_repo.isAcknowledged = True + updated_task_model_from_repo.labels = [PyObjectId(label_id_1_str)] + updated_task_model_from_repo.updatedBy = user_id_str + updated_task_model_from_repo.updatedAt = datetime.now(timezone.utc) + + mock_repo_update.return_value = updated_task_model_from_repo + + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + mock_label = MagicMock() + mock_list_labels.return_value = [mock_label] + + validated_data_from_serializer = { + "title": "Updated Title via Service", + "description": "New Description", + "priority": TaskPriority.HIGH.name, + "status": TaskStatus.IN_PROGRESS.name, + "assignee": {"assignee_id": user_id_str, "user_type": "user"}, + "labels": [label_id_1_str], + "dueAt": updated_task_model_from_repo.dueAt, + "startedAt": updated_task_model_from_repo.startedAt, + "isAcknowledged": True, + } + + result_dto = TaskService.update_task(task_id_str, validated_data_from_serializer, user_id_str) + + mock_repo_get_by_id.assert_called_once_with(task_id_str) + mock_list_labels.assert_called_once_with([PyObjectId(label_id_1_str)]) + mock_repo_update.assert_called_once() + update_payload = mock_repo_update.call_args[0][1] + + assert update_payload["title"] == validated_data_from_serializer["title"] + assert update_payload["status"] == TaskStatus.IN_PROGRESS.value + assert update_payload["priority"] == TaskPriority.HIGH.value + assert update_payload["description"] == validated_data_from_serializer["description"] + # Remove assignee from payload since it's handled separately + assert update_payload["dueAt"] == validated_data_from_serializer["dueAt"] + assert update_payload["startedAt"] == validated_data_from_serializer["startedAt"] + assert update_payload["isAcknowledged"] == validated_data_from_serializer["isAcknowledged"] + assert update_payload["labels"] == [PyObjectId(label_id_1_str)] + assert update_payload["updatedBy"] == user_id_str + + mock_prepare_dto.assert_called_once_with(updated_task_model_from_repo) + assert result_dto == mock_dto_response + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_no_actual_changes_returns_current_task_dto( + self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + validated_data_empty = {} + result_dto = TaskService.update_task(self.task_id_str, validated_data_empty, self.user_id_str) + + mock_repo_get_by_id.assert_called_once_with(self.task_id_str) + mock_repo_update.assert_not_called() + mock_prepare_dto.assert_called_once_with(self.default_task_model) + self.assertEqual(result_dto, mock_dto_response) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_update_task_raises_task_not_found(self, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = None + validated_data = {"title": "some update"} + + with self.assertRaises(TaskNotFoundException) as context: + TaskService.update_task(self.task_id_str, validated_data, self.user_id_str) + + self.assertEqual(str(context.exception), ApiErrors.TASK_NOT_FOUND.format(self.task_id_str)) + mock_repo_get_by_id.assert_called_once_with(self.task_id_str) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.LabelRepository.list_by_ids") + def test_update_task_raises_drf_validation_error_for_missing_labels(self, mock_list_labels, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.default_task_model + mock_list_labels.return_value = [self.mock_label_1] + + label_id_non_existent = str(ObjectId()) + validated_data_with_bad_label = {"labels": [self.label_id_1_str, label_id_non_existent]} + + with self.assertRaises(DRFValidationError) as context: + TaskService.update_task(self.task_id_str, validated_data_with_bad_label, self.user_id_str) + + self.assertIn("labels", context.exception.detail) + self.assertIn( + ValidationErrors.MISSING_LABEL_IDS.format(label_id_non_existent), context.exception.detail["labels"] + ) + mock_repo_get_by_id.assert_called_once_with(self.task_id_str) + mock_list_labels.assert_called_once_with([PyObjectId(self.label_id_1_str), PyObjectId(label_id_non_existent)]) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + def test_update_task_raises_task_not_found_if_repo_update_fails(self, mock_repo_update, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.default_task_model + mock_repo_update.return_value = None + + validated_data = {"title": "Updated Title"} + + with self.assertRaises(TaskNotFoundException) as context: + TaskService.update_task(self.task_id_str, validated_data, self.user_id_str) + + self.assertEqual(str(context.exception), ApiErrors.TASK_NOT_FOUND.format(self.task_id_str)) + mock_repo_get_by_id.assert_called_once_with(self.task_id_str) + mock_repo_update.assert_called_once_with(self.task_id_str, {**validated_data, "updatedBy": self.user_id_str}) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_clears_labels_when_labels_is_none( + self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + updated_task_model_from_repo = self.default_task_model.model_copy(deep=True) + updated_task_model_from_repo.labels = [] + mock_repo_update.return_value = updated_task_model_from_repo + mock_prepare_dto.return_value = MagicMock(spec=TaskDTO) + + validated_data = {"labels": None} + TaskService.update_task(self.task_id_str, validated_data, self.user_id_str) + + _, kwargs_update = mock_repo_update.call_args + update_payload = mock_repo_update.call_args[0][1] + self.assertEqual(update_payload["labels"], []) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.LabelRepository.list_by_ids") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_sets_empty_labels_list_when_labels_is_empty_list( + self, mock_prepare_dto, mock_list_labels, mock_repo_update, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + updated_task_model_from_repo = self.default_task_model.model_copy(deep=True) + updated_task_model_from_repo.labels = [] + mock_repo_update.return_value = updated_task_model_from_repo + mock_prepare_dto.return_value = MagicMock(spec=TaskDTO) + mock_list_labels.return_value = [] + + validated_data = {"labels": []} + TaskService.update_task(self.task_id_str, validated_data, self.user_id_str) + + update_payload_sent_to_repo = mock_repo_update.call_args[0][1] + self.assertEqual(update_payload_sent_to_repo["labels"], []) + mock_list_labels.assert_not_called() + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_converts_priority_and_status_names_to_values( + self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + updated_task_model_from_repo = self.default_task_model.model_copy(deep=True) + mock_repo_update.return_value = updated_task_model_from_repo + mock_prepare_dto.return_value = MagicMock(spec=TaskDTO) + + validated_data = {"priority": TaskPriority.LOW.name, "status": TaskStatus.DONE.name} + TaskService.update_task(self.task_id_str, validated_data, self.user_id_str) + + update_payload_sent_to_repo = mock_repo_update.call_args[0][1] + self.assertEqual(update_payload_sent_to_repo["priority"], TaskPriority.LOW.value) + self.assertEqual(update_payload_sent_to_repo["status"], TaskStatus.DONE.value) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") @patch("todo.services.task_service.TaskService.prepare_task_dto") - def test_create_task_successfully_creates_task(self, mock_prepare_dto, mock_create): + def test_update_task_handles_null_priority_and_status( + self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + updated_task_model_from_repo = self.default_task_model.model_copy(deep=True) + mock_repo_update.return_value = updated_task_model_from_repo + mock_prepare_dto.return_value = MagicMock(spec=TaskDTO) + + validated_data = {"priority": None, "status": None} + TaskService.update_task(self.task_id_str, validated_data, self.user_id_str) + + update_payload_sent_to_repo = mock_repo_update.call_args[0][1] + self.assertIsNone(update_payload_sent_to_repo["priority"]) + self.assertIsNone(update_payload_sent_to_repo["status"]) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskRepository._get_assigned_task_ids_for_user") + def test_update_task_permission_denied_if_not_creator_or_assignee( + self, mock_get_assigned, mock_update, mock_get_by_id + ): + task_id = self.task_id_str + user_id = "not_creator_or_assignee" + task_model = self.default_task_model.model_copy(deep=True) + task_model.createdBy = "some_other_user" + mock_get_by_id.return_value = task_model + mock_get_assigned.return_value = [] + validated_data = {"title": "new title"} + with self.assertRaises(PermissionError) as context: + TaskService.update_task(task_id, validated_data, user_id) + self.assertEqual(str(context.exception), ApiErrors.UNAUTHORIZED_TITLE) + mock_get_by_id.assert_called_once_with(task_id) + mock_get_assigned.assert_called_once_with(user_id) + mock_update.assert_not_called() + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskRepository._get_assigned_task_ids_for_user") + def test_update_task_permission_allowed_if_assignee(self, mock_get_assigned, mock_update, mock_get_by_id): + task_id = self.task_id_str + user_id = "assignee_user" + task_model = self.default_task_model.model_copy(deep=True) + task_model.createdBy = "some_other_user" + mock_get_by_id.return_value = task_model + mock_get_assigned.return_value = [task_model.id] + mock_update.return_value = task_model + validated_data = {"title": "new title"} + TaskService.update_task(task_id, validated_data, user_id) + mock_get_by_id.assert_called_once_with(task_id) + mock_get_assigned.assert_called_once_with(user_id) + mock_update.assert_called_once() + + +class TaskServiceUpdateWithAssigneeTests(TestCase): + def setUp(self): + self.task_id_str = str(ObjectId()) + self.user_id_str = str(ObjectId()) + self.assignee_id_str = str(ObjectId()) + self.default_task_model = TaskModel( + id=ObjectId(self.task_id_str), + displayId="#TSK1", + title="Original Task Title", + description="Original Description", + priority=TaskPriority.MEDIUM, + status=TaskStatus.TODO, + createdBy=self.user_id_str, + createdAt=datetime.now(timezone.utc) - timedelta(days=2), + ) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskAssignmentRepository.update_assignment") + @patch("todo.services.task_service.UserRepository.get_by_id") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_with_assignee_success( + self, mock_prepare_dto, mock_user_get_by_id, mock_update_assignment, mock_repo_update, mock_repo_get_by_id + ): + mock_user_get_by_id.return_value = MagicMock() + mock_repo_get_by_id.return_value = self.default_task_model + + updated_task_model = self.default_task_model.model_copy(deep=True) + updated_task_model.title = "Updated Title" + updated_task_model.status = TaskStatus.IN_PROGRESS + mock_repo_update.return_value = updated_task_model + + mock_update_assignment.return_value = MagicMock() + + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + # Create DTO with task and assignee updates dto = CreateTaskDTO( - title="Test Task", - description="This is a test", - priority=TaskPriority.HIGH, + title="Updated Title", + status=TaskStatus.IN_PROGRESS.name, + assignee={"assignee_id": self.assignee_id_str, "user_type": "user"}, + createdBy=self.user_id_str, + ) + + result_dto = TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + mock_repo_get_by_id.assert_called_once_with(self.task_id_str) + mock_user_get_by_id.assert_called_once_with(self.assignee_id_str) + mock_repo_update.assert_called_once() + mock_update_assignment.assert_called_once_with(self.task_id_str, self.assignee_id_str, "user", self.user_id_str) + mock_prepare_dto.assert_called_once_with(updated_task_model, self.user_id_str) + + self.assertEqual(result_dto, mock_dto_response) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskAssignmentRepository.update_assignment") + @patch("todo.services.task_service.TeamRepository.get_by_id") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_with_team_assignee_success( + self, mock_prepare_dto, mock_team_get_by_id, mock_update_assignment, mock_repo_update, mock_repo_get_by_id + ): + mock_team_get_by_id.return_value = MagicMock() + mock_repo_get_by_id.return_value = self.default_task_model + + updated_task_model = self.default_task_model.model_copy(deep=True) + updated_task_model.title = "Updated Title" + mock_repo_update.return_value = updated_task_model + + mock_update_assignment.return_value = MagicMock() + + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + # Create DTO with team assignee + dto = CreateTaskDTO( + title="Updated Title", + assignee={"assignee_id": self.assignee_id_str, "user_type": "team"}, + createdBy=self.user_id_str, + ) + + result_dto = TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + mock_team_get_by_id.assert_called_once_with(self.assignee_id_str) + mock_update_assignment.assert_called_once_with(self.task_id_str, self.assignee_id_str, "team", self.user_id_str) + + self.assertEqual(result_dto, mock_dto_response) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_update_task_with_assignee_task_not_found(self, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = None + + dto = CreateTaskDTO(title="Updated Title", createdBy=self.user_id_str) + + with self.assertRaises(TaskNotFoundException) as context: + TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + self.assertEqual(str(context.exception), ApiErrors.TASK_NOT_FOUND.format(self.task_id_str)) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository._get_assigned_task_ids_for_user") + def test_update_task_with_assignee_permission_denied(self, mock_get_assigned, mock_repo_get_by_id): + task_model = self.default_task_model.model_copy(deep=True) + task_model.createdBy = "different_user" + mock_repo_get_by_id.return_value = task_model + mock_get_assigned.return_value = [] + + dto = CreateTaskDTO(title="Updated Title", createdBy=self.user_id_str) + + with self.assertRaises(PermissionError) as context: + TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + self.assertEqual(str(context.exception), ApiErrors.UNAUTHORIZED_TITLE) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.UserRepository.get_by_id") + def test_update_task_with_assignee_user_not_found(self, mock_user_get_by_id, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.default_task_model + mock_user_get_by_id.return_value = None + + dto = CreateTaskDTO( + title="Test Title", + assignee={"assignee_id": self.assignee_id_str, "user_type": "user"}, + createdBy=self.user_id_str, + ) + + with self.assertRaises(UserNotFoundException) as context: + TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + self.assertEqual(str(context.exception), ApiErrors.USER_NOT_FOUND.format(self.assignee_id_str)) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TeamRepository.get_by_id") + def test_update_task_with_assignee_team_not_found(self, mock_team_get_by_id, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.default_task_model + mock_team_get_by_id.return_value = None + + dto = CreateTaskDTO( + title="Test Title", + assignee={"assignee_id": self.assignee_id_str, "user_type": "team"}, + createdBy=self.user_id_str, + ) + + with self.assertRaises(ValueError) as context: + TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + self.assertEqual(str(context.exception), f"Team not found: {self.assignee_id_str}") + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_with_assignee_started_at_logic(self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.default_task_model + + updated_task_model = self.default_task_model.model_copy(deep=True) + updated_task_model.startedAt = datetime.now(timezone.utc) + mock_repo_update.return_value = updated_task_model + + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + # DTO with IN_PROGRESS status + dto = CreateTaskDTO( + title="Test Title", + status=TaskStatus.IN_PROGRESS.name, + createdBy=self.user_id_str, + ) + + result_dto = TaskService.update_task_with_assignee(self.task_id_str, dto, self.user_id_str) + + # Check that startedAt was set in the update payload + update_payload = mock_repo_update.call_args[0][1] + self.assertIn("startedAt", update_payload) + self.assertIsInstance(update_payload["startedAt"], datetime) + + self.assertEqual(result_dto, mock_dto_response) + + +class TaskServiceUpdateWithAssigneeFromDictTests(TestCase): + def setUp(self): + self.task_id_str = str(ObjectId()) + self.user_id_str = str(ObjectId()) + self.assignee_id_str = str(ObjectId()) + self.default_task_model = TaskModel( + id=ObjectId(self.task_id_str), + displayId="#TSK1", + title="Original Task Title", + description="Original Description", + priority=TaskPriority.MEDIUM, status=TaskStatus.TODO, - assignee="user123", - labels=[], - dueAt=datetime.now(timezone.utc) + timedelta(days=1), + createdBy=self.user_id_str, + createdAt=datetime.now(timezone.utc) - timedelta(days=2), ) - mock_task_model = MagicMock(spec=TaskModel) - mock_create.return_value = mock_task_model - mock_task_dto = MagicMock(spec=TaskDTO) - mock_prepare_dto.return_value = mock_task_dto + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskAssignmentRepository.update_assignment") + @patch("todo.services.task_service.UserRepository.get_by_id") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_with_assignee_from_dict_success( + self, mock_prepare_dto, mock_user_get_by_id, mock_update_assignment, mock_repo_update, mock_repo_get_by_id + ): + mock_user_get_by_id.return_value = MagicMock() + mock_repo_get_by_id.return_value = self.default_task_model + + updated_task_model = self.default_task_model.model_copy(deep=True) + updated_task_model.title = "Updated Title" + updated_task_model.status = TaskStatus.IN_PROGRESS + mock_repo_update.return_value = updated_task_model + + mock_update_assignment.return_value = MagicMock() + + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + # Validated data with task and assignee updates + validated_data = { + "title": "Updated Title", + "status": TaskStatus.IN_PROGRESS.name, + "assignee": {"assignee_id": self.assignee_id_str, "user_type": "user"}, + } + + result_dto = TaskService.update_task_with_assignee_from_dict(self.task_id_str, validated_data, self.user_id_str) + + mock_repo_get_by_id.assert_called_once_with(self.task_id_str) + mock_user_get_by_id.assert_called_once_with(self.assignee_id_str) + mock_repo_update.assert_called_once() + mock_update_assignment.assert_called_once_with(self.task_id_str, self.assignee_id_str, "user", self.user_id_str) + mock_prepare_dto.assert_called_once_with(updated_task_model, self.user_id_str) + + self.assertEqual(result_dto, mock_dto_response) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.UserRepository.get_by_id") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_with_assignee_from_dict_partial_update_only_assignee( + self, mock_prepare_dto, mock_user_get_by_id, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + mock_user_get_by_id.return_value = MagicMock() + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + # Only update assignee, no task fields + validated_data = { + "assignee": {"assignee_id": self.assignee_id_str, "user_type": "user"}, + } + + result_dto = TaskService.update_task_with_assignee_from_dict(self.task_id_str, validated_data, self.user_id_str) + + # Should not call update since no task fields changed + mock_prepare_dto.assert_called_once_with(self.default_task_model, self.user_id_str) + self.assertEqual(result_dto, mock_dto_response) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_update_task_with_assignee_from_dict_partial_update_only_title( + self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id + ): + mock_repo_get_by_id.return_value = self.default_task_model + + updated_task_model = self.default_task_model.model_copy(deep=True) + updated_task_model.title = "New Title" + mock_repo_update.return_value = updated_task_model + + mock_dto_response = MagicMock(spec=TaskDTO) + mock_prepare_dto.return_value = mock_dto_response + + # Only update title, no assignee + validated_data = { + "title": "New Title", + } + + result_dto = TaskService.update_task_with_assignee_from_dict(self.task_id_str, validated_data, self.user_id_str) + + mock_repo_update.assert_called_once() + mock_prepare_dto.assert_called_once_with(updated_task_model, self.user_id_str) + self.assertEqual(result_dto, mock_dto_response) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_update_task_with_assignee_from_dict_task_not_found(self, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = None + + validated_data = {"title": "Updated Title"} - result = TaskService.create_task(dto) + with self.assertRaises(TaskNotFoundException) as context: + TaskService.update_task_with_assignee_from_dict(self.task_id_str, validated_data, self.user_id_str) - mock_create.assert_called_once() - mock_prepare_dto.assert_called_once_with(mock_task_model) - self.assertEqual(result.data, mock_task_dto) + self.assertEqual(str(context.exception), ApiErrors.TASK_NOT_FOUND.format(self.task_id_str)) + + +class TaskServiceDeferTests(TestCase): + def setUp(self): + self.task_id = str(ObjectId()) + self.user_id = "system_user" + self.current_time = datetime.now(timezone.utc) + self.due_at = self.current_time + timedelta(days=30) + self.task_model = TaskModel( + id=self.task_id, + displayId="TASK-1", + title="Test Task", + description="A task for testing deferral.", + dueAt=self.due_at, + createdAt=self.current_time - timedelta(days=1), + createdBy=self.user_id, + ) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_defer_task_success(self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.task_model + deferred_till = self.current_time + timedelta(days=5) + + mock_updated_task = MagicMock() + mock_repo_update.return_value = mock_updated_task + mock_dto = MagicMock() + mock_prepare_dto.return_value = mock_dto + + result_dto = TaskService.defer_task(self.task_id, deferred_till, self.user_id) + + self.assertEqual(result_dto, mock_dto) + mock_repo_get_by_id.assert_called_once_with(self.task_id) + mock_repo_update.assert_called_once() + mock_prepare_dto.assert_called_once_with(mock_updated_task, "system_user") + + update_call_args = mock_repo_update.call_args[0] + self.assertEqual(update_call_args[0], self.task_id) + update_payload = update_call_args[1] + self.assertEqual(update_payload["updatedBy"], self.user_id) + self.assertIn("deferredDetails", update_payload) + self.assertEqual(update_payload["deferredDetails"]["deferredTill"], deferred_till) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_defer_task_too_close_to_due_date_raises_exception(self, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = self.task_model + deferred_till = self.due_at + timedelta(days=1) + + with self.assertRaises(UnprocessableEntityException): + TaskService.defer_task(self.task_id, deferred_till, self.user_id) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskService.prepare_task_dto") + def test_defer_task_without_due_date_success(self, mock_prepare_dto, mock_repo_update, mock_repo_get_by_id): + self.task_model.dueAt = None + mock_repo_get_by_id.return_value = self.task_model + deferred_till = self.current_time + timedelta(days=20) + mock_repo_update.return_value = MagicMock(spec=TaskModel) + + TaskService.defer_task(self.task_id, deferred_till, self.user_id) + + mock_repo_update.assert_called_once() + mock_prepare_dto.assert_called_once() + update_payload = mock_repo_update.call_args[0][1] + self.assertEqual(update_payload["deferredDetails"]["deferredTill"], deferred_till) + mock_repo_get_by_id.assert_called_once_with(self.task_id) + + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_defer_task_raises_task_not_found(self, mock_repo_get_by_id): + mock_repo_get_by_id.return_value = None + deferred_till = self.current_time + timedelta(days=5) + + with self.assertRaises(TaskNotFoundException): + TaskService.defer_task(self.task_id, deferred_till, self.user_id) + + mock_repo_get_by_id.assert_called_once_with(self.task_id) + + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_defer_task_raises_task_not_found_on_update_failure(self, mock_repo_get_by_id, mock_repo_update): + mock_repo_get_by_id.return_value = self.task_model + mock_repo_update.return_value = None + valid_deferred_till = self.current_time + timedelta(days=5) + + with self.assertRaises(TaskNotFoundException) as context: + TaskService.defer_task(self.task_id, valid_deferred_till, self.user_id) + + self.assertEqual(str(context.exception), ApiErrors.TASK_NOT_FOUND.format(self.task_id)) + mock_repo_get_by_id.assert_called_once_with(self.task_id) + mock_repo_update.assert_called_once() + + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskRepository.get_by_id") + def test_defer_task_on_done_task_raises_conflict(self, mock_repo_get_by_id, mock_repo_update): + done_task = TaskModel( + id=self.task_id, + displayId="#1", + title="Completed Task", + status=TaskStatus.DONE.value, + createdAt=datetime.now(timezone.utc), + createdBy=str(ObjectId()), + ) + mock_repo_get_by_id.return_value = done_task + valid_deferred_till = datetime.now(timezone.utc) + timedelta(days=5) + + with self.assertRaises(TaskStateConflictException) as context: + TaskService.defer_task(self.task_id, valid_deferred_till, done_task.createdBy) + + self.assertEqual(str(context.exception), ValidationErrors.CANNOT_DEFER_A_DONE_TASK) + mock_repo_get_by_id.assert_called_once_with(self.task_id) + mock_repo_update.assert_not_called() + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository.update") + @patch("todo.services.task_service.TaskRepository._get_assigned_task_ids_for_user") + def test_defer_task_permission_denied_if_not_creator_or_assignee( + self, mock_get_assigned, mock_update, mock_get_by_id + ): + task_id = self.task_id + user_id = "not_creator_or_assignee" + task_model = self.task_model + task_model.createdBy = "some_other_user" + mock_get_by_id.return_value = task_model + mock_get_assigned.return_value = [] + deferred_till = self.current_time + timedelta(days=5) + with self.assertRaises(PermissionError) as context: + TaskService.defer_task(task_id, deferred_till, user_id) + self.assertEqual(str(context.exception), ApiErrors.UNAUTHORIZED_TITLE) + mock_get_by_id.assert_called_once_with(task_id) + mock_get_assigned.assert_called_once_with(user_id) + mock_update.assert_not_called() + + @patch("todo.services.task_service.TaskRepository.get_by_id") + @patch("todo.services.task_service.TaskRepository._get_assigned_task_ids_for_user") + @patch("todo.services.task_service.TaskRepository.delete_by_id") + def test_delete_task_permission_denied_if_not_creator_or_assignee( + self, mock_delete_by_id, mock_get_assigned, mock_get_by_id + ): + task_id = str(ObjectId()) + user_id = "not_creator_or_assignee" + task_model = MagicMock() + task_model.createdBy = "some_other_user" + task_model.id = ObjectId(task_id) + mock_get_by_id.return_value = task_model + mock_get_assigned.return_value = [] + mock_delete_by_id.side_effect = PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + with self.assertRaises(PermissionError) as context: + TaskService.delete_task(task_id, user_id) + self.assertEqual(str(context.exception), ApiErrors.UNAUTHORIZED_TITLE) + mock_get_by_id.assert_not_called() + mock_get_assigned.assert_not_called() + mock_delete_by_id.assert_called_once_with(task_id, user_id) diff --git a/todo/tests/unit/services/test_team_service.py b/todo/tests/unit/services/test_team_service.py new file mode 100644 index 00000000..4e66e66e --- /dev/null +++ b/todo/tests/unit/services/test_team_service.py @@ -0,0 +1,187 @@ +from unittest import TestCase +from unittest.mock import patch +from datetime import datetime, timezone + +from todo.services.team_service import TeamService +from todo.dto.responses.get_user_teams_response import GetUserTeamsResponse +from todo.models.team import TeamModel, UserTeamDetailsModel +from todo.models.common.pyobjectid import PyObjectId + + +class TeamServiceTests(TestCase): + def setUp(self): + self.user_id = "507f1f77bcf86cd799439011" + self.team_id = "507f1f77bcf86cd799439012" + + # Mock team model + self.team_model = TeamModel( + id=PyObjectId(self.team_id), + name="Test Team", + description="Test Description", + poc_id=PyObjectId(self.user_id), + invite_code="TEST123", + created_by=PyObjectId(self.user_id), + updated_by=PyObjectId(self.user_id), + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock user team details model + self.user_team_details = UserTeamDetailsModel( + id=PyObjectId("507f1f77bcf86cd799439013"), + user_id=PyObjectId(self.user_id), + team_id=PyObjectId(self.team_id), + role_id="1", + is_active=True, + created_by=PyObjectId(self.user_id), + updated_by=PyObjectId(self.user_id), + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + @patch("todo.services.team_service.TeamRepository.get_by_id") + @patch("todo.services.team_service.UserTeamDetailsRepository.get_by_user_id") + def test_get_user_teams_success(self, mock_get_by_user_id, mock_get_team_by_id): + """Test successful retrieval of user teams""" + # Mock repository responses + mock_get_by_user_id.return_value = [self.user_team_details] + mock_get_team_by_id.return_value = self.team_model + + # Call service method + response = TeamService.get_user_teams(self.user_id) + + # Assertions + self.assertIsInstance(response, GetUserTeamsResponse) + self.assertEqual(response.total, 1) + self.assertEqual(len(response.teams), 1) + self.assertEqual(response.teams[0].name, "Test Team") + self.assertEqual(response.teams[0].id, self.team_id) + + # Verify repository calls + mock_get_by_user_id.assert_called_once_with(self.user_id) + mock_get_team_by_id.assert_called_once_with(self.team_id) + + @patch("todo.services.team_service.UserTeamDetailsRepository.get_by_user_id") + def test_get_user_teams_no_teams(self, mock_get_by_user_id): + """Test when user has no teams""" + mock_get_by_user_id.return_value = [] + + response = TeamService.get_user_teams(self.user_id) + + self.assertIsInstance(response, GetUserTeamsResponse) + self.assertEqual(response.total, 0) + self.assertEqual(len(response.teams), 0) + + @patch("todo.services.team_service.TeamRepository.get_by_id") + @patch("todo.services.team_service.UserTeamDetailsRepository.get_by_user_id") + def test_get_user_teams_team_not_found(self, mock_get_by_user_id, mock_get_team_by_id): + """Test when team is not found for user team relationship""" + mock_get_by_user_id.return_value = [self.user_team_details] + mock_get_team_by_id.return_value = None # Team not found + + response = TeamService.get_user_teams(self.user_id) + + self.assertIsInstance(response, GetUserTeamsResponse) + self.assertEqual(response.total, 0) + self.assertEqual(len(response.teams), 0) + + @patch("todo.services.team_service.UserTeamDetailsRepository.get_by_user_id") + def test_get_user_teams_repository_error(self, mock_get_by_user_id): + """Test when repository throws an exception""" + mock_get_by_user_id.side_effect = Exception("Database error") + + with self.assertRaises(ValueError) as context: + TeamService.get_user_teams(self.user_id) + + self.assertIn("Failed to get user teams", str(context.exception)) + + @patch("todo.services.user_role_service.UserRoleService.assign_role") + @patch("todo.services.team_service.AuditLogRepository.create") + @patch("todo.services.team_service.TeamCreationInviteCodeRepository.validate_and_consume_code") + @patch("todo.services.team_service.TeamRepository.create") + @patch("todo.services.team_service.UserTeamDetailsRepository.create_many") + @patch("todo.dto.team_dto.UserRepository.get_by_id") + def test_creator_always_added_as_member( + self, + mock_user_get_by_id, + mock_create_many, + mock_team_create, + mock_validate_and_consume_code, + mock_audit_log_create, + mock_assign_role, + ): + """Test that the creator is always added as a member when creating a team""" + # Patch user lookup to always return a mock user + mock_user = type( + "User", + (), + {"id": None, "name": "Test User", "email_id": "test@example.com", "created_at": None, "updated_at": None}, + )() + mock_user_get_by_id.return_value = mock_user + + mock_validate_and_consume_code.return_value = {"_id": "507f1f77bcf86cd799439013"} + # Creator is not in member_ids or as POC + creator_id = "507f1f77bcf86cd799439099" + member_ids = ["507f1f77bcf86cd799439011"] + poc_id = "507f1f77bcf86cd799439012" + from todo.dto.team_dto import CreateTeamDTO + + dto = CreateTeamDTO( + name="Team With Creator", + description="desc", + member_ids=member_ids, + poc_id=poc_id, + team_invite_code="TEST123", + ) + # Mock team creation + mock_team = self.team_model + mock_team_create.return_value = mock_team + # Call create_team + TeamService.create_team(dto, creator_id) + # Check that creator_id is in the user_team relationships + user_team_objs = mock_create_many.call_args[0][0] + all_user_ids = [str(obj.user_id) for obj in user_team_objs] + self.assertIn(creator_id, all_user_ids) + + @patch("todo.services.user_role_service.UserRoleService.assign_role") + @patch("todo.services.team_service.AuditLogRepository.create") + @patch("todo.services.team_service.TeamRepository.get_by_invite_code") + @patch("todo.services.team_service.UserTeamDetailsRepository.get_by_user_id") + @patch("todo.services.team_service.UserTeamDetailsRepository.create") + def test_join_team_by_invite_code_success( + self, mock_create, mock_get_by_user_id, mock_get_by_invite_code, mock_audit_log_create, mock_assign_role + ): + """Test successful join by invite code""" + mock_get_by_invite_code.return_value = self.team_model + mock_get_by_user_id.return_value = [] # Not a member yet + mock_create.return_value = self.user_team_details + + from todo.services.team_service import TeamService + + team_dto = TeamService.join_team_by_invite_code("TEST123", self.user_id) + self.assertEqual(team_dto.id, self.team_id) + self.assertEqual(team_dto.name, "Test Team") + mock_get_by_invite_code.assert_called_once_with("TEST123") + mock_create.assert_called_once() + + @patch("todo.services.team_service.TeamRepository.get_by_invite_code") + def test_join_team_by_invite_code_invalid_code(self, mock_get_by_invite_code): + """Test join by invite code with invalid code""" + mock_get_by_invite_code.return_value = None + from todo.services.team_service import TeamService + + with self.assertRaises(ValueError) as context: + TeamService.join_team_by_invite_code("INVALID", self.user_id) + self.assertIn("Invalid invite code", str(context.exception)) + + @patch("todo.services.team_service.TeamRepository.get_by_invite_code") + @patch("todo.services.team_service.UserTeamDetailsRepository.get_by_user_id") + def test_join_team_by_invite_code_already_member(self, mock_get_by_user_id, mock_get_by_invite_code): + """Test join by invite code when already a member""" + mock_get_by_invite_code.return_value = self.team_model + mock_get_by_user_id.return_value = [self.user_team_details] # Already a member + from todo.services.team_service import TeamService + + with self.assertRaises(ValueError) as context: + TeamService.join_team_by_invite_code("TEST123", self.user_id) + self.assertIn("already a member", str(context.exception)) diff --git a/todo/tests/unit/services/test_user_service.py b/todo/tests/unit/services/test_user_service.py new file mode 100644 index 00000000..5763d87b --- /dev/null +++ b/todo/tests/unit/services/test_user_service.py @@ -0,0 +1,92 @@ +from unittest import TestCase +from unittest.mock import patch +from rest_framework.exceptions import ValidationError as DRFValidationError + +from todo.services.user_service import UserService +from todo.models.user import UserModel +from todo.exceptions.auth_exceptions import UserNotFoundException, APIException +from todo.tests.fixtures.user import users_db_data +from todo.constants.messages import ValidationErrors, RepositoryErrors + + +class UserServiceTests(TestCase): + def setUp(self) -> None: + self.valid_google_user_data = { + "google_id": "123456789", + "email": "test@example.com", + "name": "Test User", + "picture": "https://example.com/picture.jpg", + } + self.user_model = UserModel(**users_db_data[0]) + + @patch("todo.services.user_service.UserRepository") + def test_create_or_update_user_success(self, mock_repository): + mock_repository.create_or_update.return_value = self.user_model + + result = UserService.create_or_update_user(self.valid_google_user_data) + + mock_repository.create_or_update.assert_called_once_with(self.valid_google_user_data) + self.assertEqual(result, self.user_model) + + @patch("todo.services.user_service.UserRepository") + def test_create_or_update_user_validation_error(self, mock_repository): + invalid_data = {"google_id": "123"} + + with self.assertRaises(DRFValidationError) as context: + UserService.create_or_update_user(invalid_data) + self.assertIn(ValidationErrors.MISSING_EMAIL, str(context.exception.detail)) + self.assertIn(ValidationErrors.MISSING_NAME, str(context.exception.detail)) + mock_repository.create_or_update.assert_not_called() + + @patch("todo.services.user_service.UserRepository") + def test_create_or_update_user_repository_error(self, mock_repository): + mock_repository.create_or_update.side_effect = Exception("Database error") + + with self.assertRaises(APIException) as context: + UserService.create_or_update_user(self.valid_google_user_data) + self.assertIn(RepositoryErrors.USER_CREATE_UPDATE_FAILED.format("Database error"), str(context.exception)) + + @patch("todo.services.user_service.UserRepository") + def test_get_user_by_id_success(self, mock_repository): + mock_repository.get_by_id.return_value = self.user_model + + result = UserService.get_user_by_id("123") + + mock_repository.get_by_id.assert_called_once_with("123") + self.assertEqual(result, self.user_model) + + @patch("todo.services.user_service.UserRepository") + def test_get_user_by_id_not_found(self, mock_repository): + mock_repository.get_by_id.return_value = None + + with self.assertRaises(UserNotFoundException): + UserService.get_user_by_id("123") + mock_repository.get_by_id.assert_called_once_with("123") + + def test_validate_google_user_data_success(self): + try: + UserService._validate_google_user_data(self.valid_google_user_data) + except DRFValidationError: + self.fail("ValidationError raised unexpectedly!") + + def test_validate_google_user_data_missing_fields(self): + test_cases = [ + {"email": "test@example.com", "name": "Test User"}, + {"google_id": "123", "name": "Test User"}, + {"google_id": "123", "email": "test@example.com"}, + ] + + for invalid_data in test_cases: + with self.subTest(f"Testing missing field in {invalid_data}"): + with self.assertRaises(DRFValidationError) as context: + UserService._validate_google_user_data(invalid_data) + + error_dict = context.exception.detail + self.assertTrue(len(error_dict) > 0) + + if "google_id" not in invalid_data: + self.assertIn(ValidationErrors.MISSING_GOOGLE_ID, str(error_dict)) + if "email" not in invalid_data: + self.assertIn(ValidationErrors.MISSING_EMAIL, str(error_dict)) + if "name" not in invalid_data: + self.assertIn(ValidationErrors.MISSING_NAME, str(error_dict)) diff --git a/todo/tests/unit/services/test_watchlist_service.py b/todo/tests/unit/services/test_watchlist_service.py new file mode 100644 index 00000000..5dac1d84 --- /dev/null +++ b/todo/tests/unit/services/test_watchlist_service.py @@ -0,0 +1,172 @@ +from unittest.mock import patch, MagicMock +from datetime import datetime, timezone +from bson import ObjectId +from django.test import TestCase, override_settings + +from todo.services.watchlist_service import WatchlistService +from todo.dto.watchlist_dto import CreateWatchlistDTO, WatchlistDTO, AssigneeDTO +from todo.dto.user_dto import UserDTO +from todo.models.task import TaskModel +from todo.models.watchlist import WatchlistModel +from todo.constants.messages import ApiErrors +from todo.dto.responses.error_response import ApiErrorResponse +from todo.dto.responses.get_watchlist_task_response import GetWatchlistTasksResponse + + +@override_settings(REST_FRAMEWORK={"DEFAULT_PAGINATION_SETTINGS": {"DEFAULT_PAGE_LIMIT": 10, "MAX_PAGE_LIMIT": 100}}) +class TestWatchlistService(TestCase): + def test_add_task_success(self): + """Test successful task addition to watchlist""" + task_id = str(ObjectId()) + user_id = str(ObjectId()) + created_by = str(ObjectId()) + + mock_task = MagicMock(spec=TaskModel) + mock_watchlist = MagicMock(spec=WatchlistModel) + mock_watchlist.taskId = task_id + mock_watchlist.userId = user_id + mock_watchlist.createdBy = created_by + mock_watchlist.createdAt = datetime.now(timezone.utc) + + dto = CreateWatchlistDTO( + taskId=task_id, userId=user_id, createdBy=created_by, createdAt=datetime.now(timezone.utc) + ) + + with ( + patch("todo.services.watchlist_service.validate_task_exists", return_value=mock_task), + patch("todo.services.watchlist_service.WatchlistRepository.get_by_user_and_task", return_value=None), + patch("todo.services.watchlist_service.WatchlistRepository.create", return_value=mock_watchlist), + ): + result = WatchlistService.add_task(dto) + self.assertEqual(result.data.taskId, task_id) + self.assertEqual(result.data.userId, user_id) + self.assertEqual(result.data.createdBy, created_by) + + def test_get_watchlisted_tasks_with_assignee(self): + """Test getting watchlisted tasks with assignee details (who the task belongs to)""" + user_id = str(ObjectId()) + task_id = str(ObjectId()) + assignee_id = str(ObjectId()) + + # Create mock assignee data (who the task belongs to) + assignee_dto = AssigneeDTO(assignee_id=assignee_id, assignee_name="John Doe", user_type="user") + + # Create mock watchlist task with assignee + mock_watchlist_task = WatchlistDTO( + taskId=task_id, + displayId="TASK-001", + title="Test Task", + description="Test Description", + priority=None, + status=None, + isAcknowledged=False, + isDeleted=False, + labels=[], + dueAt=None, + createdAt=datetime.now(timezone.utc), + createdBy=UserDTO(id=user_id, name="Test User"), + watchlistId=str(ObjectId()), + assignee=assignee_dto, + ) + + with patch("todo.services.watchlist_service.WatchlistRepository.get_watchlisted_tasks") as mock_get: + mock_get.return_value = (1, [mock_watchlist_task]) + + result = WatchlistService.get_watchlisted_tasks(page=1, limit=10, user_id=user_id) + + self.assertIsInstance(result, GetWatchlistTasksResponse) + self.assertEqual(len(result.tasks), 1) + self.assertEqual(result.tasks[0].taskId, task_id) + self.assertEqual(result.tasks[0].title, "Test Task") + + # Verify assignee details are included (who the task belongs to) + self.assertIsNotNone(result.tasks[0].assignee) + self.assertEqual(result.tasks[0].assignee.assignee_id, assignee_id) + self.assertEqual(result.tasks[0].assignee.assignee_name, "John Doe") + self.assertEqual(result.tasks[0].assignee.user_type, "user") + + def test_get_watchlisted_tasks_without_assignee(self): + """Test getting watchlisted tasks without assignee details (unassigned task)""" + user_id = str(ObjectId()) + task_id = str(ObjectId()) + + # Create mock watchlist task without assignee (unassigned task) + mock_watchlist_task = WatchlistDTO( + taskId=task_id, + displayId="TASK-002", + title="Unassigned Task", + description="Task without assignee", + priority=None, + status=None, + isAcknowledged=False, + isDeleted=False, + labels=[], + dueAt=None, + createdAt=datetime.now(timezone.utc), + createdBy=UserDTO(id=user_id, name="Test User"), + watchlistId=str(ObjectId()), + assignee=None, + ) + + with patch("todo.services.watchlist_service.WatchlistRepository.get_watchlisted_tasks") as mock_get: + mock_get.return_value = (1, [mock_watchlist_task]) + + result = WatchlistService.get_watchlisted_tasks(page=1, limit=10, user_id=user_id) + + self.assertIsInstance(result, GetWatchlistTasksResponse) + self.assertEqual(len(result.tasks), 1) + self.assertEqual(result.tasks[0].taskId, task_id) + self.assertEqual(result.tasks[0].title, "Unassigned Task") + + # Verify assignee is None (task belongs to no one) + self.assertIsNone(result.tasks[0].assignee) + + def test_add_task_validation_fails_invalid_task_id(self): + """Test that validation fails with invalid task ID""" + task_id = "invalid-id" + user_id = str(ObjectId()) + created_by = str(ObjectId()) + + dto = CreateWatchlistDTO( + taskId=task_id, userId=user_id, createdBy=created_by, createdAt=datetime.now(timezone.utc) + ) + + error_response = ApiErrorResponse(statusCode=400, message=ApiErrors.INVALID_TASK_ID, errors=[]) + + with patch("todo.services.watchlist_service.validate_task_exists", side_effect=ValueError(error_response)): + with self.assertRaises(ValueError) as context: + WatchlistService.add_task(dto) + + self.assertEqual(context.exception.args[0], error_response) + + def test_add_task_validation_fails_task_not_found(self): + """Test that validation fails when task doesn't exist""" + task_id = str(ObjectId()) + user_id = str(ObjectId()) + created_by = str(ObjectId()) + + dto = CreateWatchlistDTO( + taskId=task_id, userId=user_id, createdBy=created_by, createdAt=datetime.now(timezone.utc) + ) + + error_response = ApiErrorResponse(statusCode=404, message=ApiErrors.TASK_NOT_FOUND.format(task_id), errors=[]) + + with patch("todo.services.watchlist_service.validate_task_exists", side_effect=ValueError(error_response)): + with self.assertRaises(ValueError) as context: + WatchlistService.add_task(dto) + + self.assertEqual(context.exception.args[0], error_response) + + def test_update_task_validation_fails_invalid_task_id(self): + """Test that update validation fails with invalid task ID""" + task_id = ObjectId() + user_id = ObjectId() + dto = {"isActive": True} + + error_response = ApiErrorResponse(statusCode=400, message=ApiErrors.INVALID_TASK_ID, errors=[]) + + with patch("todo.services.watchlist_service.validate_task_exists", side_effect=ValueError(error_response)): + with self.assertRaises(ValueError) as context: + WatchlistService.update_task(task_id, dto, user_id) + + self.assertEqual(context.exception.args[0], error_response) diff --git a/todo/tests/unit/utils/__init__.py b/todo/tests/unit/utils/__init__.py new file mode 100644 index 00000000..1824c392 --- /dev/null +++ b/todo/tests/unit/utils/__init__.py @@ -0,0 +1 @@ +# Unit tests for utils module diff --git a/todo/tests/unit/utils/test_task_validation_utils.py b/todo/tests/unit/utils/test_task_validation_utils.py new file mode 100644 index 00000000..35926143 --- /dev/null +++ b/todo/tests/unit/utils/test_task_validation_utils.py @@ -0,0 +1,44 @@ +from unittest.mock import patch, MagicMock +from django.test import TestCase +from bson import ObjectId + +from todo.utils.task_validation_utils import validate_task_exists +from todo.models.task import TaskModel +from todo.constants.messages import ApiErrors +from todo.dto.responses.error_response import ApiErrorResponse + + +class TestTaskValidationUtils(TestCase): + def test_validate_task_exists_success(self): + """Test successful task validation when task exists""" + task_id = str(ObjectId()) + mock_task = MagicMock(spec=TaskModel) + + with patch("todo.utils.task_validation_utils.TaskRepository.get_by_id", return_value=mock_task): + result = validate_task_exists(task_id) + self.assertEqual(result, mock_task) + + def test_validate_task_exists_invalid_object_id(self): + """Test validation fails with invalid ObjectId format""" + invalid_task_id = "invalid-id" + + with self.assertRaises(ValueError) as context: + validate_task_exists(invalid_task_id) + + error_response = context.exception.args[0] + self.assertIsInstance(error_response, ApiErrorResponse) + self.assertEqual(error_response.statusCode, 400) + self.assertEqual(error_response.message, ApiErrors.INVALID_TASK_ID) + + def test_validate_task_exists_task_not_found(self): + """Test validation fails when task doesn't exist""" + task_id = str(ObjectId()) + + with patch("todo.utils.task_validation_utils.TaskRepository.get_by_id", return_value=None): + with self.assertRaises(ValueError) as context: + validate_task_exists(task_id) + + error_response = context.exception.args[0] + self.assertIsInstance(error_response, ApiErrorResponse) + self.assertEqual(error_response.statusCode, 404) + self.assertEqual(error_response.message, ApiErrors.TASK_NOT_FOUND.format(task_id)) diff --git a/todo/tests/unit/views/test_auth.py b/todo/tests/unit/views/test_auth.py new file mode 100644 index 00000000..e47fa801 --- /dev/null +++ b/todo/tests/unit/views/test_auth.py @@ -0,0 +1,276 @@ +from rest_framework.test import APITestCase, APIClient, APIRequestFactory +from rest_framework.reverse import reverse +from rest_framework import status +from unittest.mock import patch, Mock, PropertyMock +from bson.objectid import ObjectId +from django.conf import settings + +from todo.views.auth import GoogleCallbackView +from todo.utils.jwt_utils import generate_token_pair +from todo.constants.messages import AppMessages +from todo.tests.fixtures.user import google_auth_user_payload, users_db_data + + +class GoogleLoginViewTests(APITestCase): + def setUp(self): + super().setUp() + self.client = APIClient() + self.url = reverse("google_login") + + @patch("todo.services.google_oauth_service.GoogleOAuthService.get_authorization_url") + def test_get_returns_redirect_for_html_request(self, mock_get_auth_url): + mock_auth_url = "https://accounts.google.com/o/oauth2/auth" + mock_state = "test_state" + mock_get_auth_url.return_value = (mock_auth_url, mock_state) + + response = self.client.get(self.url) + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertEqual(response.url, mock_auth_url) + mock_get_auth_url.assert_called_once_with(None) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.get_authorization_url") + def test_get_returns_json_for_json_request(self, mock_get_auth_url): + mock_auth_url = "https://accounts.google.com/o/oauth2/auth" + mock_state = "test_state" + mock_get_auth_url.return_value = (mock_auth_url, mock_state) + + response = self.client.get(self.url, HTTP_ACCEPT="application/json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["statusCode"], status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Google OAuth URL generated successfully") + self.assertEqual(response.data["data"]["authUrl"], mock_auth_url) + self.assertEqual(response.data["data"]["state"], mock_state) + mock_get_auth_url.assert_called_once_with(None) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.get_authorization_url") + def test_get_returns_json_with_format_parameter(self, mock_get_auth_url): + """Test that format=json parameter returns JSON response""" + mock_auth_url = "https://accounts.google.com/o/oauth2/auth" + mock_state = "test_state" + mock_get_auth_url.return_value = (mock_auth_url, mock_state) + + response = self.client.get(f"{self.url}?format=json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["statusCode"], status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Google OAuth URL generated successfully") + self.assertEqual(response.data["data"]["authUrl"], mock_auth_url) + self.assertEqual(response.data["data"]["state"], mock_state) + mock_get_auth_url.assert_called_once_with(None) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.get_authorization_url") + def test_get_with_redirect_url_html_request(self, mock_get_auth_url): + """Test HTML request with redirect URL""" + mock_auth_url = "https://accounts.google.com/o/oauth2/auth" + mock_state = "test_state" + mock_get_auth_url.return_value = (mock_auth_url, mock_state) + redirect_url = "http://localhost:3000/callback" + + response = self.client.get(f"{self.url}?redirectURL={redirect_url}") + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertEqual(response.url, mock_auth_url) + mock_get_auth_url.assert_called_once_with(redirect_url) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.get_authorization_url") + def test_get_with_redirect_url_json_request(self, mock_get_auth_url): + """Test JSON request with redirect URL""" + mock_auth_url = "https://accounts.google.com/o/oauth2/auth" + mock_state = "test_state" + mock_get_auth_url.return_value = (mock_auth_url, mock_state) + redirect_url = "http://localhost:3000/callback" + + response = self.client.get(f"{self.url}?redirectURL={redirect_url}", HTTP_ACCEPT="application/json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["data"]["authUrl"], mock_auth_url) + self.assertEqual(response.data["data"]["state"], mock_state) + mock_get_auth_url.assert_called_once_with(redirect_url) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.get_authorization_url") + def test_stores_state_in_session(self, mock_get_auth_url): + """Test that state is stored in session for both request types""" + mock_auth_url = "https://accounts.google.com/o/oauth2/auth" + mock_state = "test_state" + mock_get_auth_url.return_value = (mock_auth_url, mock_state) + + response = self.client.get(self.url, HTTP_ACCEPT="application/json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(self.client.session.get("oauth_state"), mock_state) + + +class GoogleCallbackViewTests(APITestCase): + def setUp(self): + super().setUp() + self.client = APIClient() + self.url = reverse("google_callback") + self.factory = APIRequestFactory() + self.view = GoogleCallbackView.as_view() + + self.test_user_data = users_db_data[0] + + def test_get_redirects_for_oauth_error(self): + error = "access_denied" + response = self.client.get(f"{self.url}?error={error}") + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertIn("error=access_denied", response.url) + + def test_get_redirects_for_missing_code(self): + response = self.client.get(self.url) + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertIn("error=missing_code", response.url) + + def test_get_redirects_for_missing_state(self): + response = self.client.get(f"{self.url}?code=test_code") + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertIn("error=missing_state", response.url) + + def test_get_redirects_for_invalid_state(self): + session = self.client.session + session["oauth_state"] = "correct_state" + session.save() + + response = self.client.get(f"{self.url}?code=test_code&state=wrong_state") + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertIn("error=invalid_state", response.url) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.handle_callback") + @patch("todo.services.user_service.UserService.create_or_update_user") + def test_get_redirects_for_valid_code_and_state(self, mock_create_user, mock_handle_callback): + mock_google_data = { + "id": self.test_user_data["google_id"], + "email": self.test_user_data["email_id"], + "name": self.test_user_data["name"], + } + + user_id = str(ObjectId()) + mock_user = Mock() + mock_user.id = ObjectId(user_id) + mock_user.google_id = self.test_user_data["google_id"] + mock_user.email_id = self.test_user_data["email_id"] + mock_user.name = self.test_user_data["name"] + type(mock_user).id = PropertyMock(return_value=ObjectId(user_id)) + + mock_handle_callback.return_value = mock_google_data + mock_create_user.return_value = mock_user + + session = self.client.session + session["oauth_state"] = "test_state" + session.save() + + response = self.client.get(f"{self.url}?code=test_code&state=test_state") + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertIn("success=true", response.url) + self.assertIn(settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"), response.cookies) + self.assertIn(settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME"), response.cookies) + self.assertNotIn("oauth_state", self.client.session) + + @patch("todo.services.google_oauth_service.GoogleOAuthService.handle_callback") + def test_get_redirects_for_callback_exception(self, mock_handle_callback): + mock_handle_callback.side_effect = Exception("OAuth service error") + + session = self.client.session + session["oauth_state"] = "test_state" + session.save() + + response = self.client.get(f"{self.url}?code=test_code&state=test_state") + + self.assertEqual(response.status_code, status.HTTP_302_FOUND) + self.assertIn("error=auth_failed", response.url) + + +class GoogleLogoutViewTests(APITestCase): + def setUp(self): + super().setUp() + self.client = APIClient() + self.url = reverse("google_logout") + + def test_post_returns_success_and_clears_cookies(self): + """Test that POST requests return JSON""" + user_data = { + "user_id": str(ObjectId()), + "google_id": google_auth_user_payload["google_id"], + "email": google_auth_user_payload["email"], + "name": google_auth_user_payload["name"], + } + tokens = generate_token_pair(user_data) + self.client.cookies[settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")] = tokens["access_token"] + self.client.cookies[settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")] = tokens["refresh_token"] + + response = self.client.post(self.url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["data"]["success"], True) + self.assertEqual(response.data["message"], AppMessages.GOOGLE_LOGOUT_SUCCESS) + self.assertEqual(response.cookies.get(settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")).value, "") + self.assertEqual(response.cookies.get(settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")).value, "") + + def test_logout_clears_session(self): + """Test that logout clears session data""" + session = self.client.session + session["oauth_state"] = "test_state" + session["some_other_data"] = "test_data" + session.save() + + response = self.client.post(self.url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertNotIn("oauth_state", self.client.session) + self.assertNotIn("some_other_data", self.client.session) + + def test_logout_clears_sessionid_cookie(self): + """Test that logout clears sessionid cookie""" + self.client.cookies["sessionid"] = "test_session_id" + + response = self.client.post(self.url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.cookies.get("sessionid").value, "") + + +class UserViewProfileTrueTests(APITestCase): + def setUp(self): + self.client = APIClient() + self.url = reverse("users") + self.user_data = { + "user_id": str(ObjectId()), + "google_id": "test_google_id", + "email": "test@example.com", + "name": "Test User", + } + tokens = generate_token_pair(self.user_data) + self.client.cookies[settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")] = tokens["access_token"] + self.client.cookies[settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")] = tokens["refresh_token"] + + def test_returns_401_if_not_authenticated(self): + client = APIClient() + response = client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, 401) + + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + @patch("todo.services.user_service.UserService.get_user_by_id") + def test_returns_user_info(self, mock_get_user, mock_user_repo): + from todo.models.user import UserModel + + mock_user = UserModel( + id=ObjectId(self.user_data["user_id"]), + google_id=self.user_data["google_id"], + email_id=self.user_data["email"], + name=self.user_data["name"], + picture="https://example.com/picture.jpg", + ) + mock_get_user.return_value = mock_user + mock_user_repo.return_value = mock_user + + response = self.client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data["data"]["id"], self.user_data["user_id"]) + self.assertEqual(response.data["data"]["email"], self.user_data["email"]) diff --git a/todo/tests/unit/views/test_label.py b/todo/tests/unit/views/test_label.py new file mode 100644 index 00000000..a9908b1f --- /dev/null +++ b/todo/tests/unit/views/test_label.py @@ -0,0 +1,115 @@ +from rest_framework.test import APIClient, APISimpleTestCase +from rest_framework.reverse import reverse +from rest_framework import status +from unittest.mock import patch, Mock +from bson.objectid import ObjectId +from rest_framework.response import Response +from django.conf import settings + +from todo.dto.responses.get_labels_response import GetLabelsResponse +from todo.dto.label_dto import LabelDTO +from todo.constants.messages import ApiErrors +from todo.utils.jwt_utils import generate_token_pair + + +class AuthenticatedTestCase(APISimpleTestCase): + def setUp(self): + super().setUp() + self.client = APIClient() + self._setup_auth_cookies() + + def _setup_auth_cookies(self): + user_data = { + "user_id": str(ObjectId()), + "google_id": "test_google_id", + "email": "test@example.com", + "name": "Test User", + } + tokens = generate_token_pair(user_data) + + self.client.cookies[settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")] = tokens["access_token"] + self.client.cookies[settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")] = tokens["refresh_token"] + + +@patch("todo.middlewares.jwt_auth.JWTAuthenticationMiddleware._try_authentication", return_value=True) +class LabelViewTests(AuthenticatedTestCase): + def setUp(self): + super().setUp() + self.url = reverse("labels") + self.label_dtos = [ + LabelDTO(id="1", name="Bug", color="red"), + LabelDTO(id="2", name="Feature", color="blue"), + ] + + @patch("todo.services.label_service.LabelService.get_labels") + def test_get_labels_returns_200_for_valid_params(self, mock_get_labels: Mock, mock_auth): + mock_get_labels.return_value = GetLabelsResponse(labels=[self.label_dtos[0]], total=1, page=1, limit=10) + + response: Response = self.client.get(self.url, {"page": 1, "limit": 10, "search": "bug"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_labels.assert_called_once_with(page=1, limit=10, search="bug") + self.assertEqual(response.data["total"], 1) + + @patch("todo.services.label_service.LabelService.get_labels") + def test_get_labels_uses_default_values(self, mock_get_labels: Mock, mock_auth): + mock_get_labels.return_value = GetLabelsResponse(labels=self.label_dtos, total=2, page=1, limit=10) + + response: Response = self.client.get(self.url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_labels.assert_called_once_with(page=1, limit=10, search="") + self.assertEqual(response.data["total"], 2) + + @patch("todo.services.label_service.LabelService.get_labels") + def test_get_labels_strips_whitespace_from_search(self, mock_get_labels: Mock, mock_auth): + mock_get_labels.return_value = GetLabelsResponse(labels=[self.label_dtos[0]], total=1, page=1, limit=10) + + response: Response = self.client.get(self.url, {"search": " bug "}) + mock_get_labels.assert_called_once_with(page=1, limit=10, search="bug") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["total"], 1) + + def test_get_labels_returns_400_for_invalid_query_params(self, mock_auth): + response: Response = self.client.get(self.url, {"page": "abc", "limit": -1, "search": 123}) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + error_fields = [error["source"]["parameter"] for error in response.data["errors"]] + self.assertIn("page", error_fields) + self.assertIn("limit", error_fields) + + @patch("todo.services.label_service.LabelService.get_labels") + def test_get_labels_returns_with_error_object(self, mock_get_labels: Mock, mock_auth): + mock_get_labels.return_value = GetLabelsResponse( + labels=[], total=0, page=1, limit=10, error={"message": ApiErrors.PAGE_NOT_FOUND, "code": "PAGE_NOT_FOUND"} + ) + + response: Response = self.client.get(self.url, {"page": 99, "limit": 10}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("error", response.data) + self.assertEqual(response.data["error"]["code"], "PAGE_NOT_FOUND") + + @patch("todo.services.label_service.LabelService.get_labels") + def test_get_labels_handles_internal_error(self, mock_get_labels: Mock, mock_auth): + mock_get_labels.return_value = GetLabelsResponse( + labels=[], + total=0, + page=1, + limit=10, + error={"message": ApiErrors.INTERNAL_SERVER_ERROR, "code": "INTERNAL_ERROR"}, + ) + + response: Response = self.client.get(self.url, {"page": 1, "limit": 10, "search": "urgent"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["error"]["code"], "INTERNAL_ERROR") + + @patch("todo.services.label_service.LabelService.get_labels") + def test_get_labels_ignores_extra_params(self, mock_get_labels: Mock, mock_auth): + mock_get_labels.return_value = GetLabelsResponse(labels=self.label_dtos, total=2, page=1, limit=10) + + response: Response = self.client.get(self.url, {"page": 1, "limit": 10, "extra": "ignored"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_labels.assert_called_once_with(page=1, limit=10, search="") diff --git a/todo/tests/unit/views/test_task.py b/todo/tests/unit/views/test_task.py index a7765460..db780734 100644 --- a/todo/tests/unit/views/test_task.py +++ b/todo/tests/unit/views/test_task.py @@ -1,24 +1,41 @@ -from unittest import TestCase -from rest_framework.test import APISimpleTestCase, APIClient, APIRequestFactory from rest_framework.reverse import reverse from rest_framework import status from unittest.mock import patch, Mock from rest_framework.response import Response from django.conf import settings from datetime import datetime, timedelta, timezone - -from todo.views.task import TaskView +from bson.objectid import ObjectId +from bson.errors import InvalidId as BsonInvalidId +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase from todo.dto.user_dto import UserDTO from todo.dto.task_dto import TaskDTO from todo.dto.responses.get_tasks_response import GetTasksResponse from todo.dto.responses.create_task_response import CreateTaskResponse from todo.tests.fixtures.task import task_dtos -from todo.constants.task import TaskPriority, TaskStatus +from todo.constants.task import ( + TaskPriority, + TaskStatus, + SORT_FIELD_PRIORITY, + SORT_FIELD_DUE_AT, + SORT_FIELD_CREATED_AT, + SORT_FIELD_UPDATED_AT, + SORT_FIELD_ASSIGNEE, + SORT_ORDER_ASC, + SORT_ORDER_DESC, +) +from todo.dto.responses.get_task_by_id_response import GetTaskByIdResponse +from todo.exceptions.task_exceptions import TaskNotFoundException, UnprocessableEntityException +from todo.constants.messages import ValidationErrors, ApiErrors +from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail +from rest_framework.exceptions import ValidationError as DRFValidationError +from todo.dto.deferred_details_dto import DeferredDetailsDTO +from rest_framework.test import APIClient +from todo.dto.task_assignment_dto import TaskAssignmentDTO -class TaskViewTests(APISimpleTestCase): +class TaskViewTests(AuthenticatedMongoTestCase): def setUp(self): - self.client = APIClient() + super().setUp() self.url = reverse("tasks") self.valid_params = {"page": 1, "limit": 10} @@ -28,9 +45,17 @@ def test_get_tasks_returns_200_for_valid_params(self, mock_get_tasks: Mock): response: Response = self.client.get(self.url, self.valid_params) - mock_get_tasks.assert_called_once_with(page=1, limit=10) + mock_get_tasks.assert_called_once_with( + page=1, + limit=10, + sort_by="updatedAt", + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - expected_response = mock_get_tasks.return_value.model_dump(mode="json", exclude_none=True) + expected_response = mock_get_tasks.return_value.model_dump(mode="json") self.assertDictEqual(response.data, expected_response) @patch("todo.services.task_service.TaskService.get_tasks") @@ -39,7 +64,15 @@ def test_get_tasks_returns_200_without_params(self, mock_get_tasks: Mock): response: Response = self.client.get(self.url) default_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["DEFAULT_PAGE_LIMIT"] - mock_get_tasks.assert_called_once_with(page=1, limit=default_limit) + mock_get_tasks.assert_called_once_with( + page=1, + limit=default_limit, + sort_by="updatedAt", + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_get_tasks_returns_400_for_invalid_query_params(self): @@ -53,7 +86,7 @@ def test_get_tasks_returns_400_for_invalid_query_params(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) expected_response = { "statusCode": 400, - "message": "Invalid request", + "message": "A valid integer is required.", "errors": [ {"source": {"parameter": "page"}, "detail": "A valid integer is required."}, {"source": {"parameter": "limit"}, "detail": "limit must be greater than or equal to 1"}, @@ -68,39 +101,111 @@ def test_get_tasks_returns_400_for_invalid_query_params(self): self.assertEqual(actual_error["source"]["parameter"], expected_error["source"]["parameter"]) self.assertEqual(actual_error["detail"], expected_error["detail"]) + @patch("todo.services.task_service.TaskService.get_task_by_id") + def test_get_single_task_success(self, mock_get_task_by_id: Mock): + valid_task_id = str(ObjectId()) + mock_task_data = task_dtos[0] + mock_get_task_by_id.return_value = mock_task_data + + expected_response_obj = GetTaskByIdResponse(data=mock_task_data) + + response = self.client.get(reverse("task_detail", args=[valid_task_id])) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, expected_response_obj.model_dump(mode="json")) + mock_get_task_by_id.assert_called_once_with(valid_task_id) + + @patch("todo.services.task_service.TaskService.get_task_by_id") + def test_get_single_task_not_found(self, mock_get_task_by_id: Mock): + non_existent_task_id = str(ObjectId()) + expected_error_message = ApiErrors.TASK_NOT_FOUND.format(non_existent_task_id) + mock_get_task_by_id.side_effect = TaskNotFoundException(task_id=non_existent_task_id) + + response = self.client.get(reverse("task_detail", args=[non_existent_task_id])) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data["statusCode"], status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data["message"], expected_error_message) + self.assertEqual(len(response.data["errors"]), 1) + self.assertEqual(response.data["errors"][0]["source"], {"path": "task_id"}) + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.RESOURCE_NOT_FOUND_TITLE) + self.assertEqual(response.data["errors"][0]["detail"], expected_error_message) + mock_get_task_by_id.assert_called_once_with(non_existent_task_id) + + @patch("todo.services.task_service.TaskService.get_task_by_id") + def test_get_single_task_invalid_id_format(self, mock_get_task_by_id: Mock): + invalid_task_id = "invalid-id-string" + mock_get_task_by_id.side_effect = ValueError(ValidationErrors.INVALID_TASK_ID_FORMAT) + + response = self.client.get(reverse("task_detail", args=[invalid_task_id])) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["statusCode"], status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["message"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(len(response.data["errors"]), 1) + self.assertEqual(response.data["errors"][0]["source"], {"path": "task_id"}) + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(response.data["errors"][0]["detail"], ValidationErrors.INVALID_TASK_ID_FORMAT) + mock_get_task_by_id.assert_called_once_with(invalid_task_id) -class TaskViewTest(TestCase): + @patch("todo.services.task_service.TaskService.get_task_by_id") + def test_get_single_task_unexpected_error(self, mock_get_task_by_id: Mock): + task_id = str(ObjectId()) + mock_get_task_by_id.side_effect = Exception("Some random error") + + response = self.client.get(reverse("task_detail", args=[task_id])) + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["statusCode"], status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["message"], ApiErrors.INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["errors"][0]["detail"], ApiErrors.INTERNAL_SERVER_ERROR) + mock_get_task_by_id.assert_called_once_with(task_id) + + +class TaskViewTest(AuthenticatedMongoTestCase): def setUp(self): - self.factory = APIRequestFactory() - self.view = TaskView.as_view() + super().setUp() @patch("todo.services.task_service.TaskService.get_tasks") def test_get_tasks_with_default_pagination(self, mock_get_tasks): """Test GET /tasks without any query parameters uses default pagination""" mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) - request = self.factory.get("/tasks") - response = self.view(request) + response = self.client.get("/v1/tasks") self.assertEqual(response.status_code, status.HTTP_200_OK) default_limit = settings.REST_FRAMEWORK["DEFAULT_PAGINATION_SETTINGS"]["DEFAULT_PAGE_LIMIT"] - mock_get_tasks.assert_called_once_with(page=1, limit=default_limit) + mock_get_tasks.assert_called_once_with( + page=1, + limit=default_limit, + sort_by="updatedAt", + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) @patch("todo.services.task_service.TaskService.get_tasks") def test_get_tasks_with_valid_pagination(self, mock_get_tasks): """Test GET /tasks with valid page and limit parameters""" mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) - request = self.factory.get("/tasks", {"page": "2", "limit": "15"}) - response = self.view(request) + response = self.client.get("/v1/tasks", {"page": "2", "limit": "15"}) self.assertEqual(response.status_code, status.HTTP_200_OK) - mock_get_tasks.assert_called_once_with(page=2, limit=15) + mock_get_tasks.assert_called_once_with( + page=2, + limit=15, + sort_by="updatedAt", + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) def test_get_tasks_with_invalid_page(self): """Test GET /tasks with invalid page parameter""" - request = self.factory.get("/tasks", {"page": "0"}) - response = self.view(request) + response = self.client.get("/v1/tasks", {"page": "0"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) error_detail = str(response.data) @@ -109,8 +214,7 @@ def test_get_tasks_with_invalid_page(self): def test_get_tasks_with_invalid_limit(self): """Test GET /tasks with invalid limit parameter""" - request = self.factory.get("/tasks", {"limit": "0"}) - response = self.view(request) + response = self.client.get("/v1/tasks", {"limit": "0"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) error_detail = str(response.data) @@ -119,27 +223,188 @@ def test_get_tasks_with_invalid_limit(self): def test_get_tasks_with_non_numeric_parameters(self): """Test GET /tasks with non-numeric parameters""" - request = self.factory.get("/tasks", {"page": "abc", "limit": "def"}) - response = self.view(request) + response = self.client.get("/v1/tasks", {"page": "abc", "limit": "def"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) error_detail = str(response.data) self.assertTrue("page" in error_detail or "limit" in error_detail) -class CreateTaskViewTests(APISimpleTestCase): +class TaskViewSortingTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + + @patch("todo.services.task_service.TaskService.get_tasks") + def test_get_tasks_with_sort_by_priority(self, mock_get_tasks): + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + response = self.client.get("/v1/tasks", {"sort_by": "priority"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=1, + limit=20, + sort_by=SORT_FIELD_PRIORITY, + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + @patch("todo.services.task_service.TaskService.get_tasks") + def test_get_tasks_with_sort_by_and_order(self, mock_get_tasks): + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + response = self.client.get("/v1/tasks", {"sort_by": "dueAt", "order": "desc"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=1, + limit=20, + sort_by=SORT_FIELD_DUE_AT, + order=SORT_ORDER_DESC, + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + @patch("todo.services.task_service.TaskService.get_tasks") + def test_get_tasks_with_all_sort_fields(self, mock_get_tasks): + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + sort_fields_with_expected_orders = [ + (SORT_FIELD_PRIORITY, "desc"), + (SORT_FIELD_DUE_AT, "asc"), + (SORT_FIELD_CREATED_AT, "desc"), + (SORT_FIELD_UPDATED_AT, "desc"), + (SORT_FIELD_ASSIGNEE, "asc"), + ] + + for sort_field, expected_order in sort_fields_with_expected_orders: + with self.subTest(sort_field=sort_field): + mock_get_tasks.reset_mock() + + response = self.client.get("/v1/tasks", {"sort_by": sort_field}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=1, + limit=20, + sort_by=sort_field, + order=expected_order, + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + @patch("todo.services.task_service.TaskService.get_tasks") + def test_get_tasks_with_all_order_values(self, mock_get_tasks): + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + order_values = [SORT_ORDER_ASC, SORT_ORDER_DESC] + + for order in order_values: + with self.subTest(order=order): + mock_get_tasks.reset_mock() + + response = self.client.get("/v1/tasks", {"sort_by": SORT_FIELD_PRIORITY, "order": order}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=1, + limit=20, + sort_by=SORT_FIELD_PRIORITY, + order=order, + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + def test_get_tasks_with_invalid_sort_by(self): + response = self.client.get("/v1/tasks", {"sort_by": "invalid_field"}) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + error_detail = str(response.data) + self.assertIn("sort_by", error_detail) + + def test_get_tasks_with_invalid_order(self): + response = self.client.get("/v1/tasks", {"sort_by": SORT_FIELD_PRIORITY, "order": "invalid_order"}) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + error_detail = str(response.data) + self.assertIn("order", error_detail) + + @patch("todo.services.task_service.TaskService.get_tasks") + def test_get_tasks_sorting_with_pagination(self, mock_get_tasks): + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + response = self.client.get( + "/v1/tasks", {"page": "2", "limit": "15", "sort_by": SORT_FIELD_DUE_AT, "order": SORT_ORDER_ASC} + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=2, + limit=15, + sort_by=SORT_FIELD_DUE_AT, + order=SORT_ORDER_ASC, + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + @patch("todo.services.task_service.TaskService.get_tasks") + def test_get_tasks_default_behavior_unchanged(self, mock_get_tasks): + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + response = self.client.get("/v1/tasks") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=1, + limit=20, + sort_by=SORT_FIELD_UPDATED_AT, + order="desc", + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + def test_get_tasks_edge_case_combinations(self): + with patch("todo.services.task_service.TaskService.get_tasks") as mock_get_tasks: + mock_get_tasks.return_value = GetTasksResponse(tasks=task_dtos) + + response = self.client.get("/v1/tasks", {"order": SORT_ORDER_ASC}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_tasks.assert_called_once_with( + page=1, + limit=20, + sort_by=SORT_FIELD_UPDATED_AT, + order=SORT_ORDER_ASC, + user_id=str(self.user_id), + team_id=None, + status_filter=None, + ) + + +class CreateTaskViewTests(AuthenticatedMongoTestCase): def setUp(self): - self.client = APIClient() + super().setUp() self.url = reverse("tasks") + self.user_id = str(ObjectId()) self.valid_payload = { "title": "Write tests", "description": "Cover all core paths", "priority": "HIGH", "status": "IN_PROGRESS", - "assignee": "developer1", + "assignee": {"assignee_id": self.user_id, "user_type": "user"}, "labels": [], "dueAt": (datetime.now(timezone.utc) + timedelta(days=2)).isoformat().replace("+00:00", "Z"), + "timezone": "Asia/Calcutta", } @patch("todo.services.task_service.TaskService.create_task") @@ -151,7 +416,16 @@ def test_create_task_returns_201_on_success(self, mock_create_task): description=self.valid_payload["description"], priority=TaskPriority[self.valid_payload["priority"]], status=TaskStatus[self.valid_payload["status"]], - assignee=UserDTO(id="developer1", name="SYSTEM"), + assignee=TaskAssignmentDTO( + id="assignment-1", + task_id="task-1", + assignee_id="user-1", + user_type="user", + is_active=True, + created_by="user-1", + created_at=datetime.now(timezone.utc), + assignee_name="SYSTEM", + ), isAcknowledged=False, labels=[], startedAt=datetime.now(timezone.utc), @@ -220,6 +494,7 @@ def test_create_task_returns_400_when_label_ids_are_not_objectids(self): def test_create_task_returns_400_when_dueAt_is_past(self): invalid_payload = self.valid_payload.copy() invalid_payload["dueAt"] = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat().replace("+00:00", "Z") + invalid_payload["timezone"] = "Asia/Kolkata" response = self.client.post(self.url, data=invalid_payload, format="json") @@ -233,6 +508,518 @@ def test_create_task_returns_500_on_internal_error(self, mock_create_task): try: response = self.client.post(self.url, data=self.valid_payload, format="json") self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertIn("An unexpected error occurred", str(response.data)) + self.assertEqual(response.data["message"], ApiErrors.INTERNAL_SERVER_ERROR) except Exception as e: self.assertEqual(str(e), "Database exploded") + + +class TaskDeleteViewTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.valid_task_id = str(ObjectId()) + self.url = reverse("task_detail", kwargs={"task_id": self.valid_task_id}) + + @patch("todo.services.task_service.TaskService.delete_task") + def test_delete_task_returns_204_on_success(self, mock_delete_task: Mock): + mock_delete_task.return_value = None + response = self.client.delete(self.url) + mock_delete_task.assert_called_once_with(ObjectId(self.valid_task_id), str(self.user_id)) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(response.data, None) + + @patch("todo.services.task_service.TaskService.delete_task") + def test_delete_task_returns_404_when_not_found(self, mock_delete_task: Mock): + mock_delete_task.side_effect = TaskNotFoundException(self.valid_task_id) + response = self.client.delete(self.url) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertIn(ApiErrors.TASK_NOT_FOUND.format(self.valid_task_id), response.data["message"]) + + @patch("todo.services.task_service.TaskService.delete_task") + def test_delete_task_returns_400_for_invalid_id_format(self, mock_delete_task: Mock): + mock_delete_task.side_effect = BsonInvalidId() + invalid_url = reverse("task_detail", kwargs={"task_id": "invalid-id"}) + response = self.client.delete(invalid_url) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn(ValidationErrors.INVALID_TASK_ID_FORMAT, response.data["message"]) + + +class TaskUpdateViewTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.task_id_str = str(ObjectId()) + self.task_url = f"/v1/tasks/{self.task_id_str}/update" + + # Create a mock task DTO for testing + self.updated_task_dto_fixture = TaskDTO( + id=self.task_id_str, + displayId="#TSK1", + title="Updated Task Title", + description="Updated Description", + priority=TaskPriority.HIGH, + status=TaskStatus.IN_PROGRESS, + labels=[], + dueAt=datetime.now(timezone.utc) + timedelta(days=5), + startedAt=datetime.now(timezone.utc), + isAcknowledged=True, + createdAt=datetime.now(timezone.utc), + updatedAt=datetime.now(timezone.utc), + createdBy=UserDTO(id=str(self.user_id), name="Test User"), + updatedBy=UserDTO(id=str(self.user_id), name="Test User"), + assignee=TaskAssignmentDTO( + id=str(ObjectId()), + task_id=self.task_id_str, + assignee_id=str(ObjectId()), + user_type="user", + is_active=True, + created_by=str(self.user_id), + updated_by=None, + created_at=datetime.now(timezone.utc), + updated_at=None, + ), + deferredDetails=None, + in_watchlist=None, + ) + + @patch("todo.views.task.UpdateTaskSerializer") + @patch("todo.views.task.TaskService.update_task_with_assignee_from_dict") + def test_patch_task_and_assignee_success(self, mock_service_update_task, mock_update_serializer_class): + future_date = datetime.now(timezone.utc) + timedelta(days=5) + assignee_id = str(ObjectId()) + + valid_payload = { + "title": "Updated Task Title", + "description": "Updated Description", + "priority": TaskPriority.HIGH.name, + "status": TaskStatus.IN_PROGRESS.name, + "assignee": {"assignee_id": assignee_id, "user_type": "user"}, + "dueAt": future_date.isoformat(), + } + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = { + "title": "Updated Task Title", + "description": "Updated Description", + "priority": TaskPriority.HIGH.name, + "status": TaskStatus.IN_PROGRESS.name, + "assignee": {"assignee_id": assignee_id, "user_type": "user"}, + "dueAt": future_date, + } + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.return_value = self.updated_task_dto_fixture + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # Check that the serializer was called with the correct data + mock_update_serializer_class.assert_called_once() + call_args = mock_update_serializer_class.call_args + self.assertEqual(call_args[1]["partial"], True) + + mock_serializer_instance.is_valid.assert_called_once() + mock_service_update_task.assert_called_once() + + expected_response_data = self.updated_task_dto_fixture.model_dump(mode="json") + self.assertEqual(response.data, expected_response_data) + + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_and_assignee_validation_error(self, mock_update_serializer_class): + invalid_payload = { + "title": "", # Invalid: empty title + "priority": "INVALID_PRIORITY", # Invalid priority + } + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = False + mock_serializer_instance.errors = { + "title": ["Title cannot be blank"], + "priority": ["Invalid priority value"], + } + mock_update_serializer_class.return_value = mock_serializer_instance + + response = self.client.patch(self.task_url, data=invalid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + self.assertEqual(response.data["statusCode"], 400) + + @patch("todo.views.task.UpdateTaskSerializer") + @patch("todo.views.task.TaskService.update_task_with_assignee_from_dict") + def test_patch_task_and_assignee_task_not_found(self, mock_service_update_task, mock_update_serializer_class): + valid_payload = {"title": "Updated Title"} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = {"title": "Updated Title"} + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.side_effect = TaskNotFoundException(self.task_id_str) + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertIn("errors", response.data) + + @patch("todo.views.task.UpdateTaskSerializer") + @patch("todo.views.task.TaskService.update_task_with_assignee_from_dict") + def test_patch_task_and_assignee_permission_denied(self, mock_service_update_task, mock_update_serializer_class): + valid_payload = {"title": "Updated Title"} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = {"title": "Updated Title"} + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.side_effect = PermissionError(ApiErrors.UNAUTHORIZED_TITLE) + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertIn("errors", response.data) + + def test_patch_task_and_assignee_unauthenticated(self): + # Create a new client without authentication + unauthenticated_client = APIClient() + response = unauthenticated_client.patch(self.task_url, data={}, format="json") + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + +class TaskDetailViewPatchTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.task_id_str = str(ObjectId()) + self.task_url = reverse("task_detail", args=[self.task_id_str]) + self.future_date = (datetime.now(timezone.utc) + timedelta(days=7)).isoformat() + + self.updated_task_dto_fixture = TaskDTO( + id=self.task_id_str, + displayId="#UPD1", + title="Updated Title from View Test", + description="Updated description.", + priority=TaskPriority.HIGH.value, + status=TaskStatus.IN_PROGRESS.value, + assignee=TaskAssignmentDTO( + id="assignment-1", + task_id="task-1", + assignee_id="user-1", + user_type="user", + is_active=True, + created_by="user-1", + created_at=datetime.now(timezone.utc) - timedelta(days=2), + assignee_name="SYSTEM", + ), + isAcknowledged=True, + labels=[], + startedAt=datetime.now(timezone.utc) - timedelta(hours=1), + dueAt=datetime.fromisoformat( + self.future_date.replace("Z", "+00:00") if "Z" in self.future_date else self.future_date + ), + in_watchlist=None, + createdAt=datetime.now(timezone.utc) - timedelta(days=2), + updatedAt=datetime.now(timezone.utc), + createdBy=UserDTO(id="system_creator", name="SYSTEM"), + updatedBy=UserDTO(id="system_patch_user", name="SYSTEM"), + ) + + @patch("todo.views.task.UpdateTaskSerializer") + @patch("todo.views.task.TaskService.update_task") + def test_patch_task_success(self, mock_service_update_task, mock_update_serializer_class): + valid_payload = { + "title": "Updated Title from View Test", + "priority": TaskPriority.HIGH.name, + "dueAt": self.future_date, + } + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = valid_payload + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.return_value = self.updated_task_dto_fixture + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + mock_update_serializer_class.assert_called_once_with(data=valid_payload, partial=True) + mock_serializer_instance.is_valid.assert_called_once_with(raise_exception=True) + mock_service_update_task.assert_called_once_with( + task_id=self.task_id_str, validated_data=valid_payload, user_id=str(self.user_id) + ) + + expected_response_data = self.updated_task_dto_fixture.model_dump(mode="json") + self.assertEqual(response.data, expected_response_data) + + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_serializer_invalid_data(self, mock_update_serializer_class): + invalid_payload = {"title": " ", "dueAt": "not-a-date"} + + mock_serializer_instance = Mock() + error_detail = {"title": [ValidationErrors.BLANK_TITLE], "dueAt": ["Invalid date format."]} + mock_serializer_instance.is_valid.side_effect = DRFValidationError(detail=error_detail) + mock_update_serializer_class.return_value = mock_serializer_instance + + response = self.client.patch(self.task_url, data=invalid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + errors_list = response.data["errors"] + + title_error_found = any( + err.get("source", {}).get("parameter") == "title" and ValidationErrors.BLANK_TITLE in err.get("detail", "") + for err in errors_list + ) + due_at_error_found = any( + err.get("source", {}).get("parameter") == "dueAt" and "Invalid date format" in err.get("detail", "") + for err in errors_list + ) + + self.assertTrue(title_error_found, "Title validation error not found in response as expected.") + self.assertTrue(due_at_error_found, "dueAt validation error not found in response as expected.") + + @patch("todo.views.task.TaskService.update_task") + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_service_raises_task_not_found(self, mock_update_serializer_class, mock_service_update_task): + valid_payload = {"title": "Attempt to update non-existent task"} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = valid_payload + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.side_effect = TaskNotFoundException(task_id=self.task_id_str) + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + expected_message = ApiErrors.TASK_NOT_FOUND.format(self.task_id_str) + self.assertEqual(response.data["statusCode"], status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data["message"], expected_message) + self.assertEqual(response.data["errors"][0]["detail"], expected_message) + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.RESOURCE_NOT_FOUND_TITLE) + self.assertEqual(response.data["errors"][0]["source"]["path"], "task_id") + + @patch("todo.views.task.TaskService.update_task") + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_service_raises_bson_invalid_id_for_task_id( + self, mock_update_serializer_class, mock_service_update_task + ): + invalid_task_id_format = "not-a-valid-object-id" + url_with_invalid_id = reverse("task_detail", args=[invalid_task_id_format]) + valid_payload = {"title": "Update with invalid task ID format"} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = valid_payload + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.side_effect = BsonInvalidId(ValidationErrors.INVALID_TASK_ID_FORMAT) + + response = self.client.patch(url_with_invalid_id, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["statusCode"], status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["message"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(response.data["errors"][0]["detail"], ValidationErrors.INVALID_TASK_ID_FORMAT) + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.VALIDATION_ERROR) + self.assertEqual(response.data["errors"][0]["source"]["path"], "task_id") + + @patch("todo.views.task.TaskService.update_task") + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_service_raises_drf_validation_error( + self, mock_update_serializer_class, mock_service_update_task + ): + valid_payload = {"labels": ["some_valid_id", "a_label_id_that_service_finds_missing"]} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = valid_payload + mock_update_serializer_class.return_value = mock_serializer_instance + + service_error_detail = { + "labels": [ValidationErrors.MISSING_LABEL_IDS.format("a_label_id_that_service_finds_missing")] + } + mock_service_update_task.side_effect = DRFValidationError(detail=service_error_detail) + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["statusCode"], status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["message"], service_error_detail["labels"][0]) + + self.assertIn( + "labels", + response.data["errors"][0]["source"]["parameter"], + "Source parameter should indicate 'labels' field", + ) + self.assertEqual(response.data["errors"][0]["detail"], service_error_detail["labels"][0]) + + @patch("todo.views.task.TaskService.update_task") + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_service_raises_general_value_error( + self, mock_update_serializer_class, mock_service_update_task + ): + valid_payload = {"title": "Update that causes generic service error"} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = valid_payload + mock_update_serializer_class.return_value = mock_serializer_instance + + simulated_service_api_error = ApiErrorResponse( + statusCode=status.HTTP_500_INTERNAL_SERVER_ERROR, + message=ApiErrors.SERVER_ERROR, + errors=[ApiErrorDetail(detail="Failed to save task updates in service.", title=ApiErrors.UNEXPECTED_ERROR)], + ) + mock_service_update_task.side_effect = ValueError(simulated_service_api_error) + + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["statusCode"], status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["message"], ApiErrors.SERVER_ERROR) + self.assertEqual(response.data["errors"][0]["detail"], "Failed to save task updates in service.") + self.assertEqual(response.data["errors"][0]["title"], ApiErrors.UNEXPECTED_ERROR) + + @patch("todo.views.task.TaskService.update_task") + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_service_raises_unhandled_exception( + self, mock_update_serializer_class, mock_service_update_task + ): + valid_payload = {"title": "Update that causes unhandled service error"} + + mock_serializer_instance = Mock() + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = valid_payload + mock_update_serializer_class.return_value = mock_serializer_instance + + mock_service_update_task.side_effect = Exception("Something completely unexpected broke!") + + with patch.object(settings, "DEBUG", False): + response = self.client.patch(self.task_url, data=valid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["statusCode"], status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["message"], ApiErrors.INTERNAL_SERVER_ERROR) + self.assertEqual(response.data["errors"][0]["detail"], ApiErrors.INTERNAL_SERVER_ERROR) + + with patch.object(settings, "DEBUG", True): + response_debug = self.client.patch(self.task_url, data=valid_payload, format="json") + self.assertEqual(response_debug.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + self.assertEqual(response_debug.data["errors"][0]["detail"], "Something completely unexpected broke!") + + @patch("todo.views.task.TaskService.update_task") + @patch("todo.views.task.UpdateTaskSerializer") + def test_patch_task_service_raises_exception(self, mock_update_serializer_class, mock_service_update_task): + mock_service_update_task.side_effect = Exception("A wild error appears!") + mock_serializer_instance = mock_update_serializer_class.return_value + mock_serializer_instance.is_valid.return_value = True + + response = self.client.patch(self.task_url, data={}, format="json") + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + + @patch("todo.views.task.DeferTaskSerializer") + @patch("todo.views.task.TaskService.defer_task") + def test_patch_task_defer_action_success(self, mock_service_defer_task, mock_defer_serializer_class): + deferred_till_datetime = datetime.now(timezone.utc) + timedelta(days=5) + deferred_task_dto = self.updated_task_dto_fixture.model_copy(deep=True) + deferred_task_dto.deferredDetails = DeferredDetailsDTO( + deferredAt=datetime.now(timezone.utc), + deferredTill=deferred_till_datetime, + deferredBy=UserDTO(id="system_defer_user", name="SYSTEM"), + ) + mock_service_defer_task.return_value = deferred_task_dto + mock_serializer_instance = mock_defer_serializer_class.return_value + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = {"deferredTill": deferred_till_datetime} + + url_with_action = f"{self.task_url}?action=defer" + request_data = {"deferredTill": deferred_till_datetime.isoformat()} + response = self.client.patch(url_with_action, data=request_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, deferred_task_dto.model_dump(mode="json")) + mock_defer_serializer_class.assert_called_once_with(data=request_data) + mock_service_defer_task.assert_called_once_with( + task_id=self.task_id_str, + deferred_till=deferred_till_datetime, + user_id=str(self.user_id), + ) + + @patch("todo.views.task.DeferTaskSerializer") + def test_patch_task_defer_action_serializer_invalid(self, mock_defer_serializer_class): + mock_serializer_instance = mock_defer_serializer_class.return_value + validation_error = DRFValidationError({"deferredTill": ["This field may not be blank."]}) + mock_serializer_instance.is_valid.side_effect = validation_error + + url_with_action = f"{self.task_url}?action=defer" + response = self.client.patch(url_with_action, data={}, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + @patch("todo.views.task.TaskService.defer_task") + @patch("todo.views.task.DeferTaskSerializer") + def test_patch_task_defer_service_raises_task_not_found(self, mock_defer_serializer_class, mock_service_defer_task): + deferred_till_datetime = datetime.now(timezone.utc) + timedelta(days=5) + mock_service_defer_task.side_effect = TaskNotFoundException(self.task_id_str) + mock_serializer_instance = mock_defer_serializer_class.return_value + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = {"deferredTill": deferred_till_datetime} + + url_with_action = f"{self.task_url}?action=defer" + response = self.client.patch( + url_with_action, data={"deferredTill": deferred_till_datetime.isoformat()}, format="json" + ) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + @patch("todo.views.task.TaskService.defer_task") + @patch("todo.views.task.DeferTaskSerializer") + def test_patch_task_defer_service_raises_unprocessable_entity( + self, mock_defer_serializer_class, mock_service_defer_task + ): + deferred_till_datetime = datetime.now(timezone.utc) + timedelta(days=5) + error_message = "Cannot defer too close to due date." + mock_service_defer_task.side_effect = UnprocessableEntityException(error_message) + mock_serializer_instance = mock_defer_serializer_class.return_value + mock_serializer_instance.is_valid.return_value = True + mock_serializer_instance.validated_data = {"deferredTill": deferred_till_datetime} + + url_with_action = f"{self.task_url}?action=defer" + response = self.client.patch( + url_with_action, data={"deferredTill": deferred_till_datetime.isoformat()}, format="json" + ) + + self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) + self.assertEqual(response.data["message"], error_message) + + def test_patch_task_unsupported_action_raises_validation_error(self): + unsupported_action = "archive" + url = reverse("task_detail", kwargs={"task_id": self.task_id_str}) + response = self.client.patch(f"{url}?action={unsupported_action}", data={}, format="json") + + expected_detail = ValidationErrors.UNSUPPORTED_ACTION.format(unsupported_action) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["errors"][0]["detail"], expected_detail) + + +class TaskViewProfileTrueTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.url = reverse("tasks") + + @patch("todo.services.task_service.TaskService.get_tasks_for_user") + def test_get_tasks_profile_true_returns_user_tasks(self, mock_get_tasks_for_user): + mock_get_tasks_for_user.return_value = GetTasksResponse(tasks=[]) + response = self.client.get(self.url + "?profile=true") + mock_get_tasks_for_user.assert_called_once() + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_get_tasks_profile_true_requires_auth(self): + client = APIClient() + response = client.get(self.url + "?profile=true") + self.assertEqual(response.status_code, 401) diff --git a/todo/tests/unit/views/test_task_assignment.py b/todo/tests/unit/views/test_task_assignment.py new file mode 100644 index 00000000..bc316637 --- /dev/null +++ b/todo/tests/unit/views/test_task_assignment.py @@ -0,0 +1,156 @@ +import unittest +from unittest.mock import patch +from rest_framework import status +from bson import ObjectId +from datetime import datetime, timezone + +from todo.tests.integration.base_mongo_test import AuthenticatedMongoTestCase +from todo.dto.task_assignment_dto import TaskAssignmentDTO +from todo.dto.responses.create_task_assignment_response import CreateTaskAssignmentResponse + + +class TaskAssignmentViewTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.url = "/v1/task-assignments" + self.task_id = str(ObjectId()) + self.team_id = str(ObjectId()) + + self.valid_user_assignment_payload = { + "task_id": self.task_id, + "assignee_id": str(self.user_id), + "user_type": "user", + } + + self.valid_team_assignment_payload = {"task_id": self.task_id, "assignee_id": self.team_id, "user_type": "team"} + + @unittest.skip("Skipping temporarily") + @patch("todo.services.task_assignment_service.TaskAssignmentService.create_task_assignment") + def test_create_user_assignment_success(self, mock_create_assignment): + # Mock service response + response_dto = TaskAssignmentDTO( + id=str(ObjectId()), + task_id=self.task_id, + assignee_id=str(self.user_id), + user_type="user", + is_active=True, + created_by=str(self.user_id), + created_at=datetime.now(timezone.utc), + ) + mock_create_assignment.return_value = CreateTaskAssignmentResponse(data=response_dto) + + response = self.client.post(self.url, data=self.valid_user_assignment_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertIn("data", response.data) + self.assertEqual(response.data["data"]["user_type"], "user") + mock_create_assignment.assert_called_once() + + @unittest.skip("Skipping temporarily") + @patch("todo.services.task_assignment_service.TaskAssignmentService.create_task_assignment") + def test_create_team_assignment_success(self, mock_create_assignment): + # Mock service response + response_dto = TaskAssignmentDTO( + id=str(ObjectId()), + task_id=self.task_id, + assignee_id=self.team_id, + user_type="team", + is_active=True, + created_by=str(self.user_id), + created_at=datetime.now(timezone.utc), + assignee_name="SYSTEM", + ) + mock_create_assignment.return_value = CreateTaskAssignmentResponse(data=response_dto) + + response = self.client.post(self.url, data=self.valid_team_assignment_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertIn("data", response.data) + self.assertEqual(response.data["data"]["user_type"], "team") + mock_create_assignment.assert_called_once() + + def test_create_assignment_invalid_user_type(self): + invalid_payload = {"task_id": self.task_id, "assignee_id": str(self.user_id), "user_type": "invalid_type"} + + response = self.client.post(self.url, data=invalid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + + def test_create_assignment_invalid_task_id(self): + invalid_payload = {"task_id": "invalid_id", "assignee_id": str(self.user_id), "user_type": "user"} + + response = self.client.post(self.url, data=invalid_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + + def test_create_assignment_missing_required_fields(self): + incomplete_payload = { + "task_id": self.task_id, + # Missing assignee_id and user_type + } + + response = self.client.post(self.url, data=incomplete_payload, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + + +class TaskAssignmentDetailViewTests(AuthenticatedMongoTestCase): + def setUp(self): + super().setUp() + self.task_id = str(ObjectId()) + self.url = f"/v1/task-assignments/{self.task_id}" + + @unittest.skip("Skipping temporarily") + @patch("todo.services.task_assignment_service.TaskAssignmentService.get_task_assignment") + def test_get_task_assignment_success(self, mock_get_assignment): + # Mock service response + response_dto = TaskAssignmentDTO( + id=str(ObjectId()), + task_id=self.task_id, + assignee_id=str(self.user_id), + user_type="user", + is_active=True, + created_by=str(self.user_id), + created_at=datetime.now(timezone.utc), + assignee_name="SYSTEM", + ) + mock_get_assignment.return_value = response_dto + + response = self.client.get(self.url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["task_id"], self.task_id) + mock_get_assignment.assert_called_once_with(self.task_id) + + @patch("todo.services.task_assignment_service.TaskAssignmentService.get_task_assignment") + def test_get_task_assignment_not_found(self, mock_get_assignment): + # Mock service returning None + mock_get_assignment.return_value = None + + response = self.client.get(self.url) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertIn("message", response.data) + + @patch("todo.services.task_assignment_service.TaskAssignmentService.delete_task_assignment") + def test_delete_task_assignment_success(self, mock_delete_assignment): + # Mock successful deletion + mock_delete_assignment.return_value = True + + response = self.client.delete(self.url) + + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + mock_delete_assignment.assert_called_once_with(self.task_id, str(self.user_id)) + + @patch("todo.services.task_assignment_service.TaskAssignmentService.delete_task_assignment") + def test_delete_task_assignment_not_found(self, mock_delete_assignment): + # Mock unsuccessful deletion + mock_delete_assignment.return_value = False + + response = self.client.delete(self.url) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertIn("message", response.data) diff --git a/todo/tests/unit/views/test_team.py b/todo/tests/unit/views/test_team.py new file mode 100644 index 00000000..b733e374 --- /dev/null +++ b/todo/tests/unit/views/test_team.py @@ -0,0 +1,184 @@ +from unittest import TestCase +from unittest.mock import patch, MagicMock +from rest_framework.test import APIClient +from rest_framework import status + +from todo.views.team import TeamListView, JoinTeamByInviteCodeView, RemoveTeamMemberView +from todo.dto.responses.get_user_teams_response import GetUserTeamsResponse +from todo.dto.team_dto import TeamDTO +from datetime import datetime, timezone + + +class TeamListViewTests(TestCase): + def setUp(self): + self.client = APIClient() + self.view = TeamListView() + self.mock_user_id = "507f1f77bcf86cd799439011" + + @patch("todo.views.team.TeamService.get_user_teams") + def test_get_user_teams_success(self, mock_get_user_teams): + """Test successful retrieval of user teams""" + # Mock team data + team_dto = TeamDTO( + id="507f1f77bcf86cd799439012", + name="Test Team", + description="Test Description", + poc_id="507f1f77bcf86cd799439013", + invite_code="TEST123", + created_by="507f1f77bcf86cd799439011", + updated_by="507f1f77bcf86cd799439011", + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + mock_response = GetUserTeamsResponse(teams=[team_dto], total=1) + mock_get_user_teams.return_value = mock_response + + # Mock request with user_id + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + + response = self.view.get(mock_request) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + mock_get_user_teams.assert_called_once_with(self.mock_user_id) + + # Check response data + response_data = response.data + self.assertEqual(response_data["total"], 1) + self.assertEqual(len(response_data["teams"]), 1) + self.assertEqual(response_data["teams"][0]["name"], "Test Team") + + @patch("todo.views.team.TeamService.get_user_teams") + def test_get_user_teams_empty_result(self, mock_get_user_teams): + """Test when user has no teams""" + mock_response = GetUserTeamsResponse(teams=[], total=0) + mock_get_user_teams.return_value = mock_response + + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + + response = self.view.get(mock_request) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + response_data = response.data + self.assertEqual(response_data["total"], 0) + self.assertEqual(len(response_data["teams"]), 0) + + @patch("todo.views.team.TeamService.get_user_teams") + def test_get_user_teams_service_error(self, mock_get_user_teams): + """Test when service throws an error""" + mock_get_user_teams.side_effect = ValueError("Service error") + + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + + response = self.view.get(mock_request) + + self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) + response_data = response.data + self.assertEqual(response_data["statusCode"], 500) + + +class JoinTeamByInviteCodeViewTests(TestCase): + def setUp(self): + self.client = APIClient() + self.view = JoinTeamByInviteCodeView() + self.mock_user_id = "507f1f77bcf86cd799439011" + + @patch("todo.views.team.TeamService.join_team_by_invite_code") + def test_join_team_by_invite_code_success(self, mock_join): + team_dto = TeamDTO( + id="507f1f77bcf86cd799439012", + name="Test Team", + description="Test Description", + poc_id="507f1f77bcf86cd799439013", + invite_code="TEST123", + created_by="507f1f77bcf86cd799439011", + updated_by="507f1f77bcf86cd799439011", + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + mock_join.return_value = team_dto + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + mock_request.data = {"invite_code": "TEST123"} + response = self.view.post(mock_request) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["name"], "Test Team") + + @patch("todo.views.team.TeamService.join_team_by_invite_code") + def test_join_team_by_invite_code_invalid_code(self, mock_join): + mock_join.side_effect = ValueError("Invalid invite code or team does not exist.") + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + mock_request.data = {"invite_code": "INVALID"} + response = self.view.post(mock_request) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("Invalid invite code", response.data["detail"]) + + @patch("todo.views.team.TeamService.join_team_by_invite_code") + def test_join_team_by_invite_code_already_member(self, mock_join): + mock_join.side_effect = ValueError("User is already a member of this team.") + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + mock_request.data = {"invite_code": "TEST123"} + response = self.view.post(mock_request) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("already a member", response.data["detail"]) + + def test_join_team_by_invite_code_validation_error(self): + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + mock_request.data = {"invite_code": ""} # Empty code + response = self.view.post(mock_request) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("invite_code", response.data) + + +class RemoveTeamMemberViewTests(TestCase): + def setUp(self): + self.view = RemoveTeamMemberView() + self.team_id = "507f1f77bcf86cd799439012" + self.user_id = "507f1f77bcf86cd799439011" + self.mock_user_id = "507f1f77bcf86cd799439013" + + @patch("todo.views.team.TeamService.remove_member_from_team") + def test_remove_member_success(self, mock_remove): + mock_remove.return_value = True + + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + + response = self.view.delete(mock_request, self.team_id, self.user_id) + + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + mock_remove.assert_called_once_with( + user_id=self.user_id, team_id=self.team_id, removed_by_user_id=self.mock_user_id + ) + + @patch("todo.views.team.TeamService.remove_member_from_team") + def test_remove_member_not_found(self, mock_remove): + from todo.services.team_service import TeamService + + mock_remove.side_effect = TeamService.TeamOrUserNotFound() + + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + + response = self.view.delete(mock_request, self.team_id, self.user_id) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertIn("not found", response.data["detail"]) + + @patch("todo.views.team.TeamService.remove_member_from_team") + def test_remove_member_generic_error(self, mock_remove): + mock_remove.side_effect = Exception("Something went wrong") + + mock_request = MagicMock() + mock_request.user_id = self.mock_user_id + + response = self.view.delete(mock_request, self.team_id, self.user_id) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("Something went wrong", response.data["detail"]) diff --git a/todo/tests/unit/views/test_watchlist_check.py b/todo/tests/unit/views/test_watchlist_check.py new file mode 100644 index 00000000..6df8a5c2 --- /dev/null +++ b/todo/tests/unit/views/test_watchlist_check.py @@ -0,0 +1,128 @@ +from rest_framework import status +from rest_framework.test import APITestCase +from bson import ObjectId +from unittest.mock import patch, Mock +from django.conf import settings + +from todo.models.watchlist import WatchlistModel +from todo.utils.jwt_utils import generate_token_pair + + +class WatchlistCheckViewTests(APITestCase): + def setUp(self): + super().setUp() + self.url = "/v1/watchlist/tasks/check" + self.task_id = str(ObjectId()) + self.user_id = str(ObjectId()) + + # Set up authentication + self.user_data = { + "user_id": self.user_id, + "google_id": "test_google_id", + "email": "test@example.com", + "name": "Test User", + } + tokens = generate_token_pair(self.user_data) + self.client.cookies[settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME")] = tokens["access_token"] + self.client.cookies[settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME")] = tokens["refresh_token"] + + @patch("todo.repositories.watchlist_repository.WatchlistRepository.get_by_user_and_task") + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + def test_check_task_not_in_watchlist(self, mock_user_repo, mock_watchlist_repo): + """Test that a task not in watchlist returns null.""" + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + mock_watchlist_repo.return_value = None # No watchlist entry + + response = self.client.get(f"{self.url}?task_id={self.task_id}") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIsNone(response.data["in_watchlist"]) + + @patch("todo.repositories.watchlist_repository.WatchlistRepository.get_by_user_and_task") + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + def test_check_task_in_watchlist(self, mock_user_repo, mock_watchlist_repo): + """Test that a task in watchlist returns true.""" + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + + mock_watchlist_entry = Mock(spec=WatchlistModel) + mock_watchlist_entry.isActive = True + mock_watchlist_repo.return_value = mock_watchlist_entry + + response = self.client.get(f"{self.url}?task_id={self.task_id}") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["in_watchlist"], True) + + @patch("todo.repositories.watchlist_repository.WatchlistRepository.get_by_user_and_task") + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + def test_check_task_in_watchlist_but_inactive(self, mock_user_repo, mock_watchlist_repo): + """Test that an inactive watchlist entry returns false.""" + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + + mock_watchlist_entry = Mock(spec=WatchlistModel) + mock_watchlist_entry.isActive = False + mock_watchlist_repo.return_value = mock_watchlist_entry + + response = self.client.get(f"{self.url}?task_id={self.task_id}") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["in_watchlist"], False) + + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + def test_check_missing_task_id(self, mock_user_repo): + """Test that missing task_id returns 400.""" + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + + response = self.client.get(self.url) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("task_id is required", response.data["message"]) + + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + def test_check_invalid_task_id(self, mock_user_repo): + """Test that invalid task_id returns 400.""" + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + + response = self.client.get(f"{self.url}?task_id=invalid_id") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("Invalid task_id", response.data["message"]) + + @patch("todo.repositories.watchlist_repository.WatchlistRepository.get_by_user_and_task") + @patch("todo.repositories.user_repository.UserRepository.get_by_id") + def test_check_task_in_watchlist_with_updated_by(self, mock_user_repo, mock_watchlist_repo): + """Test that a task with updatedBy ObjectId works correctly.""" + from todo.models.user import UserModel + + mock_user = Mock(spec=UserModel) + mock_user.email_id = "test@example.com" + mock_user_repo.return_value = mock_user + + mock_watchlist_entry = Mock(spec=WatchlistModel) + mock_watchlist_entry.isActive = True + mock_watchlist_repo.return_value = mock_watchlist_entry + + response = self.client.get(f"{self.url}?task_id={self.task_id}") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["in_watchlist"], True) diff --git a/todo/urls.py b/todo/urls.py index 9264115f..7b4f2277 100644 --- a/todo/urls.py +++ b/todo/urls.py @@ -1,9 +1,68 @@ from django.urls import path -from todo.views.task import TaskView +from todo.views.task import TaskListView, TaskDetailView, TaskUpdateView from todo.views.health import HealthView - +from todo.views.user import UsersView +from todo.views.auth import GoogleLoginView, GoogleCallbackView, LogoutView +from todo.views.role import RoleListView, RoleDetailView +from todo.views.user_role import UserRoleListView, TeamUserRoleListView, TeamUserRoleDetailView, TeamUserRoleDeleteView +from todo.views.label import LabelListView +from todo.views.team import ( + TeamListView, + TeamDetailView, + JoinTeamByInviteCodeView, + AddTeamMembersView, + TeamInviteCodeView, + TeamActivityTimelineView, + RemoveTeamMemberView, +) +from todo.views.team_creation_invite_code import ( + GenerateTeamCreationInviteCodeView, + VerifyTeamCreationInviteCodeView, + ListTeamCreationInviteCodesView, +) +from todo.views.watchlist import WatchlistListView, WatchlistDetailView, WatchlistCheckView +from todo.views.task_assignment import TaskAssignmentView, TaskAssignmentDetailView +from todo.views.task import AssignTaskToUserView urlpatterns = [ - path("tasks", TaskView.as_view(), name="tasks"), + path("teams", TeamListView.as_view(), name="teams"), + path("teams/join-by-invite", JoinTeamByInviteCodeView.as_view(), name="join_team_by_invite"), + path("teams/", TeamDetailView.as_view(), name="team_detail"), + path("teams//members", AddTeamMembersView.as_view(), name="add_team_members"), + path("teams//users/roles", TeamUserRoleListView.as_view(), name="team_user_roles"), + path( + "teams//users//roles", TeamUserRoleDetailView.as_view(), name="team_user_role_detail" + ), + path( + "teams//users//roles/", + TeamUserRoleDeleteView.as_view(), + name="team_user_role_delete", + ), + path("teams//invite-code", TeamInviteCodeView.as_view(), name="team_invite_code"), + path("teams//activity-timeline", TeamActivityTimelineView.as_view(), name="team_activity_timeline"), + path("tasks", TaskListView.as_view(), name="tasks"), + path("tasks/", TaskDetailView.as_view(), name="task_detail"), + path("tasks//update", TaskUpdateView.as_view(), name="update_task_and_assignee"), + path("tasks//assign", AssignTaskToUserView.as_view(), name="assign_task_to_user"), + path("task-assignments", TaskAssignmentView.as_view(), name="task_assignments"), + path("task-assignments/", TaskAssignmentDetailView.as_view(), name="task_assignment_detail"), + path("roles", RoleListView.as_view(), name="roles"), + path("roles/", RoleDetailView.as_view(), name="role_detail"), path("health", HealthView.as_view(), name="health"), + path("labels", LabelListView.as_view(), name="labels"), + path("watchlist/tasks", WatchlistListView.as_view(), name="watchlist"), + path("watchlist/tasks/check", WatchlistCheckView.as_view(), name="watchlist_check"), + path("watchlist/tasks/", WatchlistDetailView.as_view(), name="watchlist_task"), + path("auth/google/login", GoogleLoginView.as_view(), name="google_login"), + path("auth/google/callback", GoogleCallbackView.as_view(), name="google_callback"), + path("auth/logout", LogoutView.as_view(), name="google_logout"), + path("users", UsersView.as_view(), name="users"), + path("users//roles", UserRoleListView.as_view(), name="user_roles"), + path("team-invite-codes/generate", GenerateTeamCreationInviteCodeView.as_view(), name="generate_team_invite_code"), + path("team-invite-codes/verify", VerifyTeamCreationInviteCodeView.as_view(), name="verify_team_invite_code"), + path("team-invite-codes", ListTeamCreationInviteCodesView.as_view(), name="list_team_invite_codes"), +] + +urlpatterns += [ + path("teams//members/", RemoveTeamMemberView.as_view(), name="remove_team_member"), ] diff --git a/todo/utils/invite_code_utils.py b/todo/utils/invite_code_utils.py new file mode 100644 index 00000000..089233be --- /dev/null +++ b/todo/utils/invite_code_utils.py @@ -0,0 +1,31 @@ +import hashlib +import datetime + + +def generate_invite_code(team_name: str) -> str: + """ + Generate a unique 6-character invite code for a team. + + Args: + team_name: The name of the team + + Returns: + A 6-character alphanumeric invite code + """ + now = datetime.datetime.utcnow().isoformat() + seed = f"{team_name}_{now}" + + hash_bytes = hashlib.sha256(seed.encode()).hexdigest() + + hash_int = int(hash_bytes[:10], 16) # Take first 10 hex digits + base36 = "" + characters = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" + + while len(base36) < 6: + hash_int, i = divmod(hash_int, 36) if hash_int > 0 else (0, 0) + base36 = characters[i] + base36 + + hash_int, i = divmod(hash_int, 36) + base36 = characters[i] + base36 + + return base36.zfill(6) diff --git a/todo/utils/jwt_utils.py b/todo/utils/jwt_utils.py new file mode 100644 index 00000000..c915741a --- /dev/null +++ b/todo/utils/jwt_utils.py @@ -0,0 +1,113 @@ +import jwt +from datetime import datetime, timedelta, timezone +from django.conf import settings + +from todo.exceptions.auth_exceptions import ( + TokenExpiredError, + TokenInvalidError, + RefreshTokenExpiredError, +) + +from todo.constants.messages import AuthErrorMessages + + +def generate_access_token(user_data: dict) -> str: + try: + now = datetime.now(timezone.utc) + expiry = now + timedelta(seconds=settings.JWT_CONFIG.get("ACCESS_TOKEN_LIFETIME")) + + payload = { + "iss": "todo-app-auth", + "exp": int(expiry.timestamp()), + "iat": int(now.timestamp()), + "sub": user_data["user_id"], + "user_id": user_data["user_id"], + "token_type": "access", + } + + token = jwt.encode( + payload=payload, + key=settings.JWT_CONFIG.get("PRIVATE_KEY"), + algorithm=settings.JWT_CONFIG.get("ALGORITHM"), + ) + return token + + except Exception as e: + raise TokenInvalidError(f"Token generation failed: {str(e)}") + + +def generate_refresh_token(user_data: dict) -> str: + try: + now = datetime.now(timezone.utc) + expiry = now + timedelta(seconds=settings.JWT_CONFIG.get("REFRESH_TOKEN_LIFETIME")) + + payload = { + "iss": "todo-app-auth", + "exp": int(expiry.timestamp()), + "iat": int(now.timestamp()), + "sub": user_data["user_id"], + "user_id": user_data["user_id"], + "token_type": "refresh", + } + token = jwt.encode( + payload=payload, + key=settings.JWT_CONFIG.get("PRIVATE_KEY"), + algorithm=settings.JWT_CONFIG.get("ALGORITHM"), + ) + + return token + + except Exception as e: + raise TokenInvalidError(f"Refresh token generation failed: {str(e)}") + + +def validate_access_token(token: str) -> dict: + try: + payload = jwt.decode( + jwt=token, + key=settings.JWT_CONFIG.get("PUBLIC_KEY"), + algorithms=[settings.JWT_CONFIG.get("ALGORITHM")], + ) + + if payload.get("token_type") != "access": + raise TokenInvalidError(AuthErrorMessages.TOKEN_INVALID) + + return payload + + except jwt.ExpiredSignatureError: + raise TokenExpiredError() + except jwt.InvalidTokenError as e: + raise TokenInvalidError(f"Invalid token: {str(e)}") + except Exception as e: + raise TokenInvalidError(f"Token validation failed: {str(e)}") + + +def validate_refresh_token(token: str) -> dict: + try: + payload = jwt.decode( + jwt=token, + key=settings.JWT_CONFIG.get("PUBLIC_KEY"), + algorithms=[settings.JWT_CONFIG.get("ALGORITHM")], + ) + if payload.get("token_type") != "refresh": + raise TokenInvalidError(AuthErrorMessages.TOKEN_INVALID) + + return payload + + except jwt.ExpiredSignatureError: + raise RefreshTokenExpiredError() + except jwt.InvalidTokenError as e: + raise TokenInvalidError(f"Invalid refresh token: {str(e)}") + except Exception as e: + raise TokenInvalidError(f"Refresh token validation failed: {str(e)}") + + +def generate_token_pair(user_data: dict) -> dict: + access_token = generate_access_token(user_data) + refresh_token = generate_refresh_token(user_data) + + return { + "access_token": access_token, + "refresh_token": refresh_token, + "expires_in": settings.JWT_CONFIG.get("ACCESS_TOKEN_LIFETIME"), + } diff --git a/todo/utils/task_validation_utils.py b/todo/utils/task_validation_utils.py new file mode 100644 index 00000000..f4e28adb --- /dev/null +++ b/todo/utils/task_validation_utils.py @@ -0,0 +1,56 @@ +from bson import ObjectId + +from todo.repositories.task_repository import TaskRepository +from todo.models.task import TaskModel +from todo.constants.messages import ApiErrors +from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail, ApiErrorSource + + +def validate_task_exists(task_id: str) -> TaskModel: + """ + Common function to validate if a task exists in the task collection. + + Args: + task_id (str): The task ID to validate + + Returns: + TaskModel: The task model if found + + Raises: + ValueError: If task doesn't exist, with ApiErrorResponse + """ + try: + # Validate ObjectId format + ObjectId(task_id) + except Exception: + raise ValueError( + ApiErrorResponse( + statusCode=400, + message=ApiErrors.INVALID_TASK_ID, + errors=[ + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "taskId"}, + title=ApiErrors.VALIDATION_ERROR, + detail=ApiErrors.INVALID_TASK_ID, + ) + ], + ) + ) + + task = TaskRepository.get_by_id(task_id) + if not task: + raise ValueError( + ApiErrorResponse( + statusCode=404, + message=ApiErrors.TASK_NOT_FOUND.format(task_id), + errors=[ + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: "taskId"}, + title=ApiErrors.TASK_NOT_FOUND_TITLE, + detail=ApiErrors.TASK_NOT_FOUND.format(task_id), + ) + ], + ) + ) + + return task diff --git a/todo/views/auth.py b/todo/views/auth.py new file mode 100644 index 00000000..ac21dfde --- /dev/null +++ b/todo/views/auth.py @@ -0,0 +1,200 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.request import Request +from rest_framework import status +from django.http import HttpResponseRedirect +from django.conf import settings +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from todo.services.google_oauth_service import GoogleOAuthService +from todo.services.user_service import UserService +from todo.utils.jwt_utils import generate_token_pair +from todo.constants.messages import AppMessages + + +class GoogleLoginView(APIView): + @extend_schema( + operation_id="google_login", + summary="Initiate Google OAuth login", + description="Redirects to Google OAuth authorization URL or returns JSON response with auth URL", + tags=["auth"], + parameters=[ + OpenApiParameter( + name="redirectURL", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="URL to redirect after successful authentication", + required=False, + ), + OpenApiParameter( + name="format", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Response format: 'json' for JSON response, otherwise redirects", + required=False, + ), + ], + responses={ + 200: OpenApiResponse(description="Google OAuth URL generated successfully"), + 302: OpenApiResponse(description="Redirect to Google OAuth URL"), + }, + ) + def get(self, request: Request): + redirect_url = request.query_params.get("redirectURL") + auth_url, state = GoogleOAuthService.get_authorization_url(redirect_url) + request.session["oauth_state"] = state + + if request.headers.get("Accept") == "application/json" or request.query_params.get("format") == "json": + return Response( + { + "statusCode": status.HTTP_200_OK, + "message": "Google OAuth URL generated successfully", + "data": {"authUrl": auth_url, "state": state}, + } + ) + + return HttpResponseRedirect(auth_url) + + +class GoogleCallbackView(APIView): + @extend_schema( + operation_id="google_callback", + summary="Handle Google OAuth callback", + description="Processes the OAuth callback from Google and creates/updates user account", + tags=["auth"], + parameters=[ + OpenApiParameter( + name="code", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Authorization code from Google", + required=True, + ), + OpenApiParameter( + name="state", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="State parameter for CSRF protection", + required=True, + ), + OpenApiParameter( + name="error", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Error from Google OAuth", + required=False, + ), + ], + responses={ + 200: OpenApiResponse(description="OAuth callback processed successfully"), + 400: OpenApiResponse(description="Bad request - invalid parameters"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request): + code = request.query_params.get("code") + state = request.query_params.get("state") + error = request.query_params.get("error") + + todo_ui_config = settings.SERVICES.get("TODO_UI", {}) + frontend_callback = f"{todo_ui_config.get('URL', '')}/{todo_ui_config.get('REDIRECT_PATH', '')}" + + if error: + return HttpResponseRedirect(f"{frontend_callback}?error={error}") + + if not code: + return HttpResponseRedirect(f"{frontend_callback}?error=missing_code") + + if not state: + return HttpResponseRedirect(f"{frontend_callback}?error=missing_state") + + stored_state = request.session.get("oauth_state") + if not stored_state or stored_state != state: + return HttpResponseRedirect(f"{frontend_callback}?error=invalid_state") + + try: + google_data = GoogleOAuthService.handle_callback(code) + user = UserService.create_or_update_user(google_data) + tokens = generate_token_pair( + { + "user_id": str(user.id), + "name": user.name, + } + ) + + response = HttpResponseRedirect(f"{frontend_callback}?success=true") + + self._set_auth_cookies(response, tokens) + request.session.pop("oauth_state", None) + + return response + + except Exception: + return HttpResponseRedirect(f"{frontend_callback}?error=auth_failed") + + def _get_cookie_config(self): + return { + "path": "/", + "domain": settings.COOKIE_SETTINGS.get("COOKIE_DOMAIN"), + "secure": settings.COOKIE_SETTINGS.get("COOKIE_SECURE"), + "httponly": settings.COOKIE_SETTINGS.get("COOKIE_HTTPONLY"), + "samesite": settings.COOKIE_SETTINGS.get("COOKIE_SAMESITE"), + } + + def _set_auth_cookies(self, response, tokens): + config = self._get_cookie_config() + response.set_cookie( + settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"), + tokens["access_token"], + max_age=tokens["expires_in"], + **config, + ) + response.set_cookie( + settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME"), + tokens["refresh_token"], + max_age=settings.JWT_CONFIG.get("REFRESH_TOKEN_LIFETIME"), + **config, + ) + + +class LogoutView(APIView): + @extend_schema( + operation_id="google_logout_post", + summary="Logout user (POST)", + description="Logout the user by clearing authentication cookies (POST method)", + tags=["auth"], + responses={ + 200: OpenApiResponse(description="Logout successful"), + }, + ) + def post(self, request: Request): + return self._handle_logout(request) + + def _handle_logout(self, request: Request): + request.session.flush() + + response = Response( + { + "statusCode": status.HTTP_200_OK, + "message": AppMessages.GOOGLE_LOGOUT_SUCCESS, + "data": {"success": True}, + } + ) + + self._clear_auth_cookies(response) + return response + + def _clear_auth_cookies(self, response): + delete_config = { + "path": "/", + "domain": settings.COOKIE_SETTINGS.get("COOKIE_DOMAIN"), + } + + response.delete_cookie(settings.COOKIE_SETTINGS.get("ACCESS_COOKIE_NAME"), **delete_config) + response.delete_cookie(settings.COOKIE_SETTINGS.get("REFRESH_COOKIE_NAME"), **delete_config) + + session_delete_config = { + "path": getattr(settings, "SESSION_COOKIE_PATH", "/"), + "domain": getattr(settings, "SESSION_COOKIE_DOMAIN"), + } + response.delete_cookie("sessionid", **session_delete_config) diff --git a/todo/views/health.py b/todo/views/health.py index 882a6af7..b0eb49f5 100644 --- a/todo/views/health.py +++ b/todo/views/health.py @@ -1,5 +1,6 @@ from rest_framework.views import APIView from rest_framework.response import Response +from drf_spectacular.utils import extend_schema, OpenApiResponse from todo.constants.health import AppHealthStatus, ComponentHealthStatus from todo_project.db.config import DatabaseManager @@ -7,6 +8,16 @@ class HealthView(APIView): + @extend_schema( + operation_id="health_check", + summary="Health check", + description="Check the health status of the application and its components", + tags=["health"], + responses={ + 200: OpenApiResponse(description="Application is healthy"), + 503: OpenApiResponse(description="Application is unhealthy"), + }, + ) def get(self, request): global database_manager is_db_healthy = database_manager.check_database_health() diff --git a/todo/views/label.py b/todo/views/label.py new file mode 100644 index 00000000..15a755ac --- /dev/null +++ b/todo/views/label.py @@ -0,0 +1,23 @@ +from rest_framework.views import APIView +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework import status + +from todo.serializers.get_labels_serializer import GetLabelQueryParamsSerializer +from todo.services.label_service import LabelService + + +class LabelListView(APIView): + def get(self, request: Request): + """ + Retrieve a paginated list of labels. + """ + query = GetLabelQueryParamsSerializer(data=request.query_params) + query.is_valid(raise_exception=True) + + response = LabelService.get_labels( + page=query.validated_data["page"], + limit=query.validated_data["limit"], + search=query.validated_data["search"], + ) + return Response(data=response.model_dump(mode="json"), status=status.HTTP_200_OK) diff --git a/todo/views/role.py b/todo/views/role.py new file mode 100644 index 00000000..e47dd2de --- /dev/null +++ b/todo/views/role.py @@ -0,0 +1,143 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework import status +from rest_framework.request import Request +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from typing import Dict, Any, Callable + +from todo.serializers.get_roles_serializer import RoleQuerySerializer +from todo.services.role_service import RoleService +from todo.exceptions.global_exception_handler import GlobalExceptionHandler +from todo.exceptions.role_exceptions import ( + RoleNotFoundException, + RoleOperationException, +) + + +class BaseRoleView(APIView): + """Base class for role views with common exception handling.""" + + def _handle_exceptions(self, func: Callable) -> Response: + """ + Common exception handling for all role operations. + + Args: + func: The function to execute with exception handling + + Returns: + Response: HTTP response with appropriate error handling + """ + try: + return func() + except RoleNotFoundException as e: + error_response = GlobalExceptionHandler.handle_role_not_found(e) + return Response({"error": error_response["error"]}, status=error_response["status_code"]) + except RoleOperationException as e: + error_response = GlobalExceptionHandler.handle_role_operation_error(e) + return Response({"error": error_response["error"]}, status=error_response["status_code"]) + except Exception as e: + error_response = GlobalExceptionHandler.handle_generic_error(e) + return Response({"error": error_response["error"]}, status=error_response["status_code"]) + + +class RoleListView(BaseRoleView): + @classmethod + def _build_filters(cls, query_serializer: RoleQuerySerializer) -> Dict[str, Any]: + """ + Build filters dictionary from query parameters. + + Args: + query_serializer: Validated query serializer + + Returns: + Dict[str, Any]: Filters dictionary for the service layer + """ + filters = {} + + if query_serializer.validated_data.get("is_active") is not None: + filters["is_active"] = query_serializer.validated_data["is_active"] + + if query_serializer.validated_data.get("name"): + filters["name"] = query_serializer.validated_data["name"] + + if query_serializer.validated_data.get("scope"): + filters["scope"] = query_serializer.validated_data["scope"] + + return filters + + @extend_schema( + operation_id="get_roles", + summary="Get all predefined roles", + description="Retrieve all predefined roles from the system. Roles are created via migration and cannot be modified through API.", + tags=["roles"], + parameters=[ + OpenApiParameter( + name="is_active", + type=OpenApiTypes.BOOL, + location=OpenApiParameter.QUERY, + description="Filter by active status", + ), + OpenApiParameter( + name="name", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Filter by role name", + ), + OpenApiParameter( + name="scope", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Filter by role scope (GLOBAL/TEAM)", + ), + ], + responses={ + 200: OpenApiResponse(description="Roles retrieved successfully"), + 400: OpenApiResponse(description="Bad request"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request): + """Get all predefined roles with optional filtering.""" + + def _execute(): + query_serializer = RoleQuerySerializer(data=request.query_params) + query_serializer.is_valid(raise_exception=True) + + filters = self._build_filters(query_serializer) + role_dtos = RoleService.get_all_roles(filters=filters) + roles_data = [role_dto.model_dump() for role_dto in role_dtos] + + return Response({"roles": roles_data, "total": len(roles_data)}, status=status.HTTP_200_OK) + + return self._handle_exceptions(_execute) + + +class RoleDetailView(BaseRoleView): + @extend_schema( + operation_id="get_role_by_id", + summary="Get role by ID", + description="Retrieve a single predefined role by its unique identifier", + tags=["roles"], + parameters=[ + OpenApiParameter( + name="role_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the role", + ), + ], + responses={ + 200: OpenApiResponse(description="Role retrieved successfully"), + 404: OpenApiResponse(description="Role not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request, role_id: str): + """Get a single role by ID.""" + + def _execute(): + role_dto = RoleService.get_role_by_id(role_id) + return Response({"role": role_dto.model_dump()}, status=status.HTTP_200_OK) + + return self._handle_exceptions(_execute) diff --git a/todo/views/task.py b/todo/views/task.py index 96cfa45a..a7e42731 100644 --- a/todo/views/task.py +++ b/todo/views/task.py @@ -1,29 +1,132 @@ +from bson import ObjectId from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework.request import Request +from rest_framework.exceptions import AuthenticationFailed, ValidationError from django.conf import settings - +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from todo.middlewares.jwt_auth import get_current_user_info from todo.serializers.get_tasks_serializer import GetTaskQueryParamsSerializer from todo.serializers.create_task_serializer import CreateTaskSerializer +from todo.serializers.update_task_serializer import UpdateTaskSerializer +from todo.serializers.defer_task_serializer import DeferTaskSerializer from todo.services.task_service import TaskService from todo.dto.task_dto import CreateTaskDTO -from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail, ApiErrorSource from todo.dto.responses.create_task_response import CreateTaskResponse +from todo.dto.responses.get_task_by_id_response import GetTaskByIdResponse +from todo.dto.responses.error_response import ( + ApiErrorResponse, + ApiErrorDetail, + ApiErrorSource, +) from todo.constants.messages import ApiErrors +from todo.constants.messages import ValidationErrors +from todo.dto.responses.get_tasks_response import GetTasksResponse +from todo.serializers.create_task_assignment_serializer import AssignTaskToUserSerializer +from todo.services.task_assignment_service import TaskAssignmentService +from todo.dto.responses.create_task_assignment_response import CreateTaskAssignmentResponse +from todo.dto.task_assignment_dto import CreateTaskAssignmentDTO +from todo.exceptions.task_exceptions import TaskNotFoundException + -class TaskView(APIView): +class TaskListView(APIView): + @extend_schema( + operation_id="get_tasks", + summary="Get paginated list of tasks", + description="Retrieve a paginated list of tasks with optional filtering and sorting. Each task now includes an 'in_watchlist' property indicating the watchlist status: true if actively watched, false if in watchlist but inactive, or null if not in watchlist.", + tags=["tasks"], + parameters=[ + OpenApiParameter( + name="page", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Page number for pagination", + ), + OpenApiParameter( + name="limit", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Number of tasks per page", + ), + OpenApiParameter( + name="teamId", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="If provided, filters tasks assigned to this team.", + required=False, + ), + OpenApiParameter( + name="status", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="If provided, filters tasks by status (e.g., 'DONE', 'IN_PROGRESS', 'TODO', 'BLOCKED', 'DEFERRED').", + required=False, + ), + ], + responses={ + 200: OpenApiResponse(response=GetTasksResponse, description="Successful response"), + 400: OpenApiResponse(description="Bad request"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) def get(self, request: Request): """ - Retrieve a paginated list of tasks. + Retrieve a paginated list of tasks, or if profile=true, only the current user's tasks. """ query = GetTaskQueryParamsSerializer(data=request.query_params) query.is_valid(raise_exception=True) + if query.validated_data["profile"]: + user = get_current_user_info(request) + if not user: + raise AuthenticationFailed(ApiErrors.AUTHENTICATION_FAILED) + status_filter = query.validated_data.get("status", "").upper() + response = TaskService.get_tasks_for_user( + user_id=user["user_id"], + page=query.validated_data["page"], + limit=query.validated_data["limit"], + status_filter=status_filter, + ) + return Response(data=response.model_dump(mode="json"), status=status.HTTP_200_OK) - response = TaskService.get_tasks(page=query.validated_data["page"], limit=query.validated_data["limit"]) + user = get_current_user_info(request) + if query.validated_data["profile"]: + response = TaskService.get_tasks_for_user( + user_id=user["user_id"], + page=query.validated_data["page"], + limit=query.validated_data["limit"], + ) + return Response( + data=response.model_dump(mode="json", exclude_none=True), + status=status.HTTP_200_OK, + ) - return Response(data=response.model_dump(mode="json", exclude_none=True), status=status.HTTP_200_OK) + team_id = query.validated_data.get("teamId") + status_filter = query.validated_data.get("status") + response = TaskService.get_tasks( + page=query.validated_data["page"], + limit=query.validated_data["limit"], + sort_by=query.validated_data["sort_by"], + order=query.validated_data.get("order"), + user_id=user["user_id"], + team_id=team_id, + status_filter=status_filter, + ) + return Response(data=response.model_dump(mode="json"), status=status.HTTP_200_OK) + @extend_schema( + operation_id="create_task", + summary="Create a new task", + description="Create a new task with the provided details", + tags=["tasks"], + request=CreateTaskSerializer, + responses={ + 201: OpenApiResponse(description="Task created successfully"), + 400: OpenApiResponse(description="Bad request"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) def post(self, request: Request): """ Create a new task. @@ -34,13 +137,15 @@ def post(self, request: Request): Returns: Response: HTTP response with created task data or error details """ + user = get_current_user_info(request) + serializer = CreateTaskSerializer(data=request.data) if not serializer.is_valid(): return self._handle_validation_errors(serializer.errors) try: - dto = CreateTaskDTO(**serializer.validated_data) + dto = CreateTaskDTO(**serializer.validated_data, createdBy=user["user_id"]) response: CreateTaskResponse = TaskService.create_task(dto) return Response(data=response.model_dump(mode="json"), status=status.HTTP_201_CREATED) @@ -48,15 +153,224 @@ def post(self, request: Request): except ValueError as e: if isinstance(e.args[0], ApiErrorResponse): error_response = e.args[0] - return Response(data=error_response.model_dump(mode="json"), status=error_response.statusCode) + return Response( + data=error_response.model_dump(mode="json"), + status=error_response.statusCode, + ) + + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": (str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR)}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + def _handle_validation_errors(self, errors): + formatted_errors = [] + for field, messages in errors.items(): + if isinstance(messages, list): + for message in messages: + formatted_errors.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: field}, + title=ApiErrors.VALIDATION_ERROR, + detail=str(message), + ) + ) + else: + formatted_errors.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: field}, + title=ApiErrors.VALIDATION_ERROR, + detail=str(messages), + ) + ) + + error_response = ApiErrorResponse(statusCode=400, message=ApiErrors.VALIDATION_ERROR, errors=formatted_errors) + + return Response( + data=error_response.model_dump(mode="json"), + status=status.HTTP_400_BAD_REQUEST, + ) + + +class TaskDetailView(APIView): + @extend_schema( + operation_id="get_task_by_id", + summary="Get task by ID", + description="Retrieve a single task by its unique identifier", + tags=["tasks"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task", + ), + ], + responses={ + 200: OpenApiResponse(description="Task retrieved successfully"), + 404: OpenApiResponse(description="Task not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request, task_id: str): + """ + Retrieve a single task by ID. + """ + task_dto = TaskService.get_task_by_id(task_id) + response_data = GetTaskByIdResponse(data=task_dto) + return Response(data=response_data.model_dump(mode="json"), status=status.HTTP_200_OK) + + @extend_schema( + operation_id="delete_task", + summary="Delete task", + description="Delete a task by its unique identifier", + tags=["tasks"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task to delete", + ), + ], + responses={ + 204: OpenApiResponse(description="Task deleted successfully"), + 404: OpenApiResponse(description="Task not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def delete(self, request: Request, task_id: str): + user = get_current_user_info(request) + task_id = ObjectId(task_id) + TaskService.delete_task(task_id, user["user_id"]) + return Response(status=status.HTTP_204_NO_CONTENT) + + @extend_schema( + operation_id="update_task", + summary="Update or defer task", + description="Partially update a task or defer it based on the action parameter", + tags=["tasks"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task", + ), + OpenApiParameter( + name="action", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Action to perform: TODO, IN_PROGRESS, DONE", + ), + ], + request=UpdateTaskSerializer, + responses={ + 200: OpenApiResponse(description="Task updated successfully"), + 400: OpenApiResponse(description="Bad request"), + 404: OpenApiResponse(description="Task not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def patch(self, request: Request, task_id: str): + """ + Partially updates a task by its ID. + Can also be used to defer a task by using ?action=defer query parameter. + """ + action = request.query_params.get("action", "update") + user = get_current_user_info(request) + + if action == "defer": + serializer = DeferTaskSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + updated_task_dto = TaskService.defer_task( + task_id=task_id, + deferred_till=serializer.validated_data["deferredTill"], + user_id=user["user_id"], + ) + elif action == "update": + serializer = UpdateTaskSerializer(data=request.data, partial=True) + + serializer.is_valid(raise_exception=True) + + updated_task_dto = TaskService.update_task( + task_id=task_id, + validated_data=serializer.validated_data, + user_id=user["user_id"], + ) + else: + raise ValidationError({"action": ValidationErrors.UNSUPPORTED_ACTION.format(action)}) + + return Response(data=updated_task_dto.model_dump(mode="json"), status=status.HTTP_200_OK) + + +class TaskUpdateView(APIView): + @extend_schema( + operation_id="update_task_and_assignee", + summary="Update task and assignee details", + description="Update both task details and assignee information in a single request. Similar to task creation but for updates.", + tags=["tasks"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task to update", + required=True, + ), + ], + request=UpdateTaskSerializer, + responses={ + 200: OpenApiResponse(description="Task and assignee updated successfully"), + 400: OpenApiResponse(description="Bad request"), + 404: OpenApiResponse(description="Task not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def patch(self, request: Request, task_id: str): + """ + Update both task details and assignee information in a single request. + Similar to task creation but for updates. + """ + user = get_current_user_info(request) + if not user: + raise AuthenticationFailed(ApiErrors.AUTHENTICATION_FAILED) + + serializer = UpdateTaskSerializer(data=request.data, partial=True) + + if not serializer.is_valid(): + return self._handle_validation_errors(serializer.errors) + + try: + # Update the task using the service with validated data + updated_task_dto = TaskService.update_task_with_assignee_from_dict( + task_id=task_id, validated_data=serializer.validated_data, user_id=user["user_id"] + ) + + return Response(data=updated_task_dto.model_dump(mode="json"), status=status.HTTP_200_OK) + + except (ValueError, TaskNotFoundException, PermissionError) as e: + if isinstance(e, ValueError) and e.args and isinstance(e.args[0], ApiErrorResponse): + error_response = e.args[0] + return Response( + data=error_response.model_dump(mode="json"), + status=error_response.statusCode, + ) fallback_response = ApiErrorResponse( statusCode=500, message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, - errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + errors=[{"detail": (str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR)}], ) return Response( - data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + data=fallback_response.model_dump(mode="json"), + status=status.HTTP_500_INTERNAL_SERVER_ERROR, ) def _handle_validation_errors(self, errors): @@ -74,10 +388,65 @@ def _handle_validation_errors(self, errors): else: formatted_errors.append( ApiErrorDetail( - source={ApiErrorSource.PARAMETER: field}, title=ApiErrors.VALIDATION_ERROR, detail=str(messages) + source={ApiErrorSource.PARAMETER: field}, + title=ApiErrors.VALIDATION_ERROR, + detail=str(messages), ) ) error_response = ApiErrorResponse(statusCode=400, message=ApiErrors.VALIDATION_ERROR, errors=formatted_errors) - return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_400_BAD_REQUEST) + return Response( + data=error_response.model_dump(mode="json"), + status=status.HTTP_400_BAD_REQUEST, + ) + + +class AssignTaskToUserView(APIView): + @extend_schema( + operation_id="assign_task_to_user", + summary="Assign task to a user", + description="Assign a task to a user by user ID. Only authorized users can perform this action.", + tags=["task-assignments"], + request=AssignTaskToUserSerializer, + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task", + required=True, + ), + ], + responses={ + 200: OpenApiResponse(response=CreateTaskAssignmentResponse, description="Task assigned successfully"), + 400: OpenApiResponse( + response=ApiErrorResponse, description="Bad request - validation error or assignee not found" + ), + 404: OpenApiResponse(response=ApiErrorResponse, description="Task not found"), + 401: OpenApiResponse(response=ApiErrorResponse, description="Unauthorized"), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def patch(self, request: Request, task_id: str): + user = get_current_user_info(request) + if not user: + raise AuthenticationFailed(ApiErrors.AUTHENTICATION_FAILED) + + serializer = AssignTaskToUserSerializer(data=request.data) + if not serializer.is_valid(): + return Response(data={"errors": serializer.errors}, status=status.HTTP_400_BAD_REQUEST) + + try: + dto = CreateTaskAssignmentDTO( + task_id=task_id, assignee_id=serializer.validated_data["assignee_id"], user_type="user" + ) + response: CreateTaskAssignmentResponse = TaskAssignmentService.create_task_assignment(dto, user["user_id"]) + return Response(data=response.model_dump(mode="json"), status=status.HTTP_200_OK) + except Exception as e: + error_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e)}], + ) + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/todo/views/task_assignment.py b/todo/views/task_assignment.py new file mode 100644 index 00000000..982f1699 --- /dev/null +++ b/todo/views/task_assignment.py @@ -0,0 +1,322 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework import status +from rest_framework.request import Request +from rest_framework.exceptions import AuthenticationFailed +from django.conf import settings +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from rest_framework import serializers + +from todo.middlewares.jwt_auth import get_current_user_info +from todo.serializers.create_task_assignment_serializer import CreateTaskAssignmentSerializer +from todo.services.task_assignment_service import TaskAssignmentService +from todo.dto.task_assignment_dto import CreateTaskAssignmentDTO +from todo.dto.responses.create_task_assignment_response import CreateTaskAssignmentResponse +from todo.dto.responses.error_response import ApiErrorResponse +from todo.constants.messages import ApiErrors +from todo.exceptions.user_exceptions import UserNotFoundException +from todo.exceptions.task_exceptions import TaskNotFoundException + + +class TaskAssignmentView(APIView): + @extend_schema( + operation_id="create_task_assignment", + summary="Assign task to user or team", + description="Assign a task to either a user or a team. The system will validate that both the task and assignee exist before creating the assignment.", + tags=["task-assignments"], + request=CreateTaskAssignmentSerializer, + responses={ + 201: OpenApiResponse( + response=CreateTaskAssignmentResponse, description="Task assignment created successfully" + ), + 400: OpenApiResponse( + response=ApiErrorResponse, description="Bad request - validation error or assignee not found" + ), + 404: OpenApiResponse(response=ApiErrorResponse, description="Task not found"), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def post(self, request: Request): + """ + Assign a task to a user or team. + + This endpoint allows you to assign a task to either a user or a team. + The system will validate that: + - The task exists in the database + - The assignee (user or team) exists in the database + - If a task already has an assignment, it will be updated + + Args: + request: HTTP request containing task assignment data + + Returns: + Response: HTTP response with created assignment data or error details + """ + user = get_current_user_info(request) + if not user: + raise AuthenticationFailed(ApiErrors.AUTHENTICATION_FAILED) + + serializer = CreateTaskAssignmentSerializer(data=request.data) + if not serializer.is_valid(): + return Response(data={"errors": serializer.errors}, status=status.HTTP_400_BAD_REQUEST) + + try: + dto = CreateTaskAssignmentDTO(**serializer.validated_data) + response: CreateTaskAssignmentResponse = TaskAssignmentService.create_task_assignment(dto, user["user_id"]) + + return Response(data=response.model_dump(mode="json"), status=status.HTTP_201_CREATED) + + except TaskNotFoundException as e: + error_response = ApiErrorResponse(statusCode=404, message="Task not found", errors=[{"detail": str(e)}]) + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_404_NOT_FOUND) + + except UserNotFoundException as e: + error_response = ApiErrorResponse(statusCode=400, message="Assignee not found", errors=[{"detail": str(e)}]) + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_400_BAD_REQUEST) + + except ValueError as e: + error_response = ApiErrorResponse(statusCode=400, message="Validation error", errors=[{"detail": str(e)}]) + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_400_BAD_REQUEST) + + except Exception as e: + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + +class ExecutorUpdateSerializer(serializers.Serializer): + executor_id = serializers.CharField(help_text="User ID of the new executor (must be a member of the team)") + + +class TaskAssignmentDetailView(APIView): + @extend_schema( + operation_id="get_task_assignment", + summary="Get task assignment by task ID", + description="Retrieve the assignment details for a specific task", + tags=["task-assignments"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task", + required=True, + ), + ], + responses={ + 200: OpenApiResponse( + response=CreateTaskAssignmentResponse, description="Task assignment retrieved successfully" + ), + 404: OpenApiResponse(response=ApiErrorResponse, description="Task assignment not found"), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def get(self, request: Request, task_id: str): + """ + Get task assignment by task ID. + + Args: + request: HTTP request + task_id: ID of the task to get assignment for + + Returns: + Response: HTTP response with assignment data or error details + """ + try: + assignment = TaskAssignmentService.get_task_assignment(task_id) + if not assignment: + error_response = ApiErrorResponse( + statusCode=404, + message="Task assignment not found", + errors=[{"detail": f"No assignment found for task {task_id}"}], + ) + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_404_NOT_FOUND) + + return Response(data=assignment.model_dump(mode="json"), status=status.HTTP_200_OK) + + except Exception as e: + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + @extend_schema( + operation_id="set_executor_for_team_task", + summary="Set or update executor for a team-assigned task (SPOC only)", + description="Allows the SPOC of a team to set or update the executor (user within the team) for a team-assigned task. All SPOC re-assignments are logged in the audit trail.", + tags=["task-assignments"], + request=ExecutorUpdateSerializer, + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task", + required=True, + ), + ], + responses={ + 200: OpenApiResponse(description="Executor updated successfully"), + 403: OpenApiResponse(description="Forbidden - only SPOC can update executor for team task"), + 404: OpenApiResponse(description="Task assignment not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def patch(self, request: Request, task_id: str): + """ + Set or update the executor for a team-assigned task. Only the SPOC can perform this action. + For user assignments, this endpoint is not applicable. + """ + user = get_current_user_info(request) + if not user: + raise AuthenticationFailed(ApiErrors.AUTHENTICATION_FAILED) + + executor_id = request.data.get("executor_id") + if not executor_id: + return Response({"error": "executor_id is required"}, status=status.HTTP_400_BAD_REQUEST) + + # Fetch the assignment and check if it's a team assignment + from todo.repositories.task_assignment_repository import TaskAssignmentRepository + from todo.repositories.team_repository import TeamRepository + from todo.repositories.user_repository import UserRepository + + assignment = TaskAssignmentRepository.get_by_task_id(task_id) + if not assignment: + return Response({"error": "Task assignment not found."}, status=status.HTTP_404_NOT_FOUND) + + # Check if it's a team assignment + if assignment.user_type != "team": + return Response( + { + "error": "This endpoint is only for team assignments. For user assignments, the assignee is the executor." + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Only SPOC can update executor + if not TeamRepository.is_user_spoc(str(assignment.assignee_id), user["user_id"]): + return Response( + {"error": "Only the SPOC can update executor for this team task."}, status=status.HTTP_403_FORBIDDEN + ) + + # Validate that the executor_id user exists + executor_user = UserRepository.get_by_id(executor_id) + if not executor_user: + return Response( + {"error": f"User with ID {executor_id} does not exist."}, status=status.HTTP_400_BAD_REQUEST + ) + + # Validate that the executor is a member of the team + if not TeamRepository.is_user_team_member(str(assignment.assignee_id), executor_id): + return Response( + {"error": f"User {executor_id} is not a member of the team."}, status=status.HTTP_400_BAD_REQUEST + ) + # Update executor_id + try: + updated_assignment = TaskAssignmentRepository.update_assignment( + task_id, executor_id, "user", user["user_id"] + ) + if not updated_assignment: + # Get more details about why it failed + import traceback + + print( + f"DEBUG: update_executor failed for task_id={task_id}, executor_id={executor_id}, user_id={user['user_id']}" + ) + print(f"DEBUG: assignment details: {assignment}") + return Response( + {"error": "Failed to update assignment. Check server logs for details."}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + except Exception as e: + print(f"DEBUG: Exception in update_executor: {str(e)}") + import traceback + + traceback.print_exc() + return Response( + {"error": f"Exception during update: {str(e)}"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + # Audit log + from todo.models.audit_log import AuditLogModel + from todo.repositories.audit_log_repository import AuditLogRepository + + previous_executor_id = assignment.executor_id if hasattr(assignment, "executor_id") else None + audit_log = AuditLogModel( + task_id=assignment.task_id, + team_id=assignment.assignee_id, + previous_executor_id=previous_executor_id, + new_executor_id=executor_id, + spoc_id=user["user_id"], + action="reassign_executor", + ) + AuditLogRepository.create(audit_log) + + return Response({"message": "Executor updated successfully."}, status=status.HTTP_200_OK) + + @extend_schema( + operation_id="delete_task_assignment", + summary="Delete task assignment", + description="Remove the assignment for a specific task", + tags=["task-assignments"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task", + required=True, + ), + ], + responses={ + 204: OpenApiResponse(description="Task assignment deleted successfully"), + 404: OpenApiResponse(response=ApiErrorResponse, description="Task assignment not found"), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def delete(self, request: Request, task_id: str): + """ + Delete task assignment by task ID. + + Args: + request: HTTP request + task_id: ID of the task to delete assignment for + + Returns: + Response: HTTP response with success or error details + """ + user = get_current_user_info(request) + if not user: + raise AuthenticationFailed(ApiErrors.AUTHENTICATION_FAILED) + + try: + success = TaskAssignmentService.delete_task_assignment(task_id, user["user_id"]) + if not success: + error_response = ApiErrorResponse( + statusCode=404, + message="Task assignment not found", + errors=[{"detail": f"No assignment found for task {task_id}"}], + ) + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_404_NOT_FOUND) + + return Response(status=status.HTTP_204_NO_CONTENT) + + except Exception as e: + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) diff --git a/todo/views/team.py b/todo/views/team.py new file mode 100644 index 00000000..03e2079d --- /dev/null +++ b/todo/views/team.py @@ -0,0 +1,483 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework import status +from rest_framework.request import Request +from django.conf import settings + +from todo.serializers.create_team_serializer import CreateTeamSerializer, JoinTeamByInviteCodeSerializer +from todo.serializers.update_team_serializer import UpdateTeamSerializer +from todo.serializers.add_team_member_serializer import AddTeamMemberSerializer +from todo.services.team_service import TeamService +from todo.dto.team_dto import CreateTeamDTO +from todo.dto.update_team_dto import UpdateTeamDTO +from todo.dto.responses.create_team_response import CreateTeamResponse +from todo.dto.responses.get_user_teams_response import GetUserTeamsResponse +from todo.dto.responses.error_response import ApiErrorResponse, ApiErrorDetail, ApiErrorSource +from todo.constants.messages import ApiErrors +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from todo.dto.team_dto import TeamDTO +from todo.services.user_service import UserService +from todo.repositories.team_repository import TeamRepository +from todo.repositories.audit_log_repository import AuditLogRepository +from todo.repositories.user_repository import UserRepository +from todo.repositories.task_repository import TaskRepository + + +class TeamListView(APIView): + def get(self, request: Request): + """ + Get all teams assigned to the authenticated user. + """ + try: + user_id = request.user_id + response: GetUserTeamsResponse = TeamService.get_user_teams(user_id) + data = response.model_dump(mode="json") + for team in data.get("teams", []): + team.pop("invite_code", None) + return Response(data=data, status=status.HTTP_200_OK) + + except ValueError as e: + if isinstance(e.args[0], ApiErrorResponse): + error_response = e.args[0] + return Response(data=error_response.model_dump(mode="json"), status=error_response.statusCode) + + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + @extend_schema( + operation_id="create_team", + summary="Create a new team", + description="Create a new team with the provided details. The creator is always added as a member, even if not in member_ids or as POC. **Note:** A valid team invite code is required in the request payload.", + tags=["teams"], + request=CreateTeamSerializer, + responses={ + 201: OpenApiResponse(response=CreateTeamResponse, description="Team created successfully"), + 400: OpenApiResponse(description="Bad request - validation error or invalid team invite code"), + 401: OpenApiResponse(description="Unauthorized - authentication required"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def post(self, request: Request): + """ + Create a new team. + """ + serializer = CreateTeamSerializer(data=request.data) + + if not serializer.is_valid(): + return self._handle_validation_errors(serializer.errors) + + try: + dto = CreateTeamDTO(**serializer.validated_data) + created_by_user_id = request.user_id + response: CreateTeamResponse = TeamService.create_team(dto, created_by_user_id) + data = response.model_dump(mode="json") + return Response(data=data, status=status.HTTP_201_CREATED) + + except ValueError as e: + if isinstance(e.args[0], ApiErrorResponse): + error_response = e.args[0] + return Response(data=error_response.model_dump(mode="json"), status=error_response.statusCode) + + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + def _handle_validation_errors(self, errors): + formatted_errors = [] + for field, messages in errors.items(): + if isinstance(messages, list): + for message in messages: + formatted_errors.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: field}, + title=ApiErrors.VALIDATION_ERROR, + detail=str(message), + ) + ) + else: + formatted_errors.append( + ApiErrorDetail( + source={ApiErrorSource.PARAMETER: field}, title=ApiErrors.VALIDATION_ERROR, detail=str(messages) + ) + ) + + error_response = ApiErrorResponse(statusCode=400, message=ApiErrors.VALIDATION_ERROR, errors=formatted_errors) + + return Response(data=error_response.model_dump(mode="json"), status=status.HTTP_400_BAD_REQUEST) + + +class TeamDetailView(APIView): + @extend_schema( + operation_id="get_team_by_id", + summary="Get team by ID", + description="Retrieve a single team by its unique identifier. Optionally, set ?member=true to get users belonging to this team.", + tags=["teams"], + parameters=[ + OpenApiParameter( + name="team_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the team", + ), + OpenApiParameter( + name="member", + type=OpenApiTypes.BOOL, + location=OpenApiParameter.QUERY, + description="If true, returns users that belong to this team instead of team details.", + required=False, + ), + ], + responses={ + 200: OpenApiResponse(description="Team or team members retrieved successfully"), + 404: OpenApiResponse(description="Team not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request, team_id: str): + """ + Retrieve a single team by ID, or users in the team if ?member=true. + """ + try: + team_dto: TeamDTO = TeamService.get_team_by_id(team_id) + member = request.query_params.get("member", "false").lower() == "true" + if member: + users = UserService.get_users_by_team_id(team_id) + users_data = [user.dict() for user in users] + team_dto.users = users_data + data = team_dto.model_dump(mode="json") + data.pop("invite_code", None) + return Response(data=data, status=status.HTTP_200_OK) + except ValueError as e: + fallback_response = ApiErrorResponse( + statusCode=404, + message=str(e), + errors=[{"detail": str(e)}], + ) + return Response(data=fallback_response.model_dump(mode="json"), status=404) + except Exception as e: + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response(data=fallback_response.model_dump(mode="json"), status=500) + + @extend_schema( + operation_id="update_team", + summary="Update team by ID", + description="Update a team's details including name, description, point of contact (POC), and team members. All fields are optional - only include the fields you want to update. For member management: if member_ids is provided, it completely replaces the current team members; if member_ids is not provided, existing members remain unchanged.", + tags=["teams"], + parameters=[ + OpenApiParameter( + name="team_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the team", + ), + ], + request=UpdateTeamSerializer, + responses={ + 200: OpenApiResponse(response=TeamDTO, description="Team updated successfully"), + 400: OpenApiResponse(description="Bad request - validation error or invalid member IDs"), + 404: OpenApiResponse(description="Team not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def patch(self, request: Request, team_id: str): + """ + Update a team by ID. + """ + serializer = UpdateTeamSerializer(data=request.data) + + if not serializer.is_valid(): + return self._handle_validation_errors(serializer.errors) + + try: + dto = UpdateTeamDTO(**serializer.validated_data) + updated_by_user_id = request.user_id + response: TeamDTO = TeamService.update_team(team_id, dto, updated_by_user_id) + data = response.model_dump(mode="json") + data.pop("invite_code", None) + return Response(data=data, status=status.HTTP_200_OK) + + except ValueError as e: + if isinstance(e.args[0], ApiErrorResponse): + error_response = e.args[0] + return Response(data=error_response.model_dump(mode="json"), status=error_response.statusCode) + + fallback_response = ApiErrorResponse( + statusCode=404, + message=str(e), + errors=[{"detail": str(e)}], + ) + return Response(data=fallback_response.model_dump(mode="json"), status=404) + except Exception as e: + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response(data=fallback_response.model_dump(mode="json"), status=500) + + +class JoinTeamByInviteCodeView(APIView): + @extend_schema( + operation_id="join_team_by_invite_code", + summary="Join a team by invite code", + description="Join a team using a valid invite code. Returns the joined team details.", + tags=["teams"], + request=JoinTeamByInviteCodeSerializer, + responses={ + 200: OpenApiResponse(response=TeamDTO, description="Joined team successfully"), + 400: OpenApiResponse(description="Bad request - validation error or already a member"), + 404: OpenApiResponse(description="Team not found or invalid invite code"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def post(self, request: Request): + serializer = JoinTeamByInviteCodeSerializer(data=request.data) + if not serializer.is_valid(): + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + try: + user_id = request.user_id + invite_code = serializer.validated_data["invite_code"] + team_dto = TeamService.join_team_by_invite_code(invite_code, user_id) + data = team_dto.model_dump(mode="json") + data.pop("invite_code", None) + return Response(data=data, status=status.HTTP_200_OK) + except ValueError as e: + return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + return Response({"detail": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + +class AddTeamMembersView(APIView): + @extend_schema( + operation_id="add_team_members", + summary="Add members to a team", + description="Add new members to a team. Only existing team members can add other members.", + tags=["teams"], + parameters=[ + OpenApiParameter( + name="team_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the team", + ), + ], + request=AddTeamMemberSerializer, + responses={ + 200: OpenApiResponse(response=TeamDTO, description="Team members added successfully"), + 400: OpenApiResponse(description="Bad request - validation error or user not a team member"), + 404: OpenApiResponse(description="Team not found"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def post(self, request: Request, team_id: str): + """ + Add members to a team. Only existing team members can add other members. + """ + serializer = AddTeamMemberSerializer(data=request.data) + + if not serializer.is_valid(): + return self._handle_validation_errors(serializer.errors) + + try: + member_ids = serializer.validated_data["member_ids"] + added_by_user_id = request.user_id + response: TeamDTO = TeamService.add_team_members(team_id, member_ids, added_by_user_id) + data = response.model_dump(mode="json") + data.pop("invite_code", None) + return Response(data=data, status=status.HTTP_200_OK) + + except ValueError as e: + if isinstance(e.args[0], ApiErrorResponse): + error_response = e.args[0] + return Response(data=error_response.model_dump(mode="json"), status=error_response.statusCode) + + fallback_response = ApiErrorResponse( + statusCode=400, + message=str(e), + errors=[{"detail": str(e)}], + ) + return Response(data=fallback_response.model_dump(mode="json"), status=400) + except Exception as e: + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": str(e) if settings.DEBUG else ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response(data=fallback_response.model_dump(mode="json"), status=500) + + def _handle_validation_errors(self, errors): + """Handle validation errors and return appropriate response.""" + error_response = ApiErrorResponse( + statusCode=400, + message=ApiErrors.VALIDATION_ERROR, + errors=[{"detail": str(error)} for error in errors.values()], + ) + return Response(data=error_response.model_dump(mode="json"), status=400) + + +class TeamInviteCodeView(APIView): + @extend_schema( + operation_id="get_team_invite_code", + summary="Get team invite code (creator or POC only)", + description="Return the invite code for a team if the requesting user is the creator or POC of the team.", + tags=["teams"], + parameters=[ + OpenApiParameter( + name="team_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the team", + required=True, + ), + ], + responses={ + 200: OpenApiResponse(description="Invite code returned successfully"), + 403: OpenApiResponse(description="Forbidden - not creator or POC"), + 404: OpenApiResponse(description="Team not found"), + }, + ) + def get(self, request: Request, team_id: str): + """ + Return the invite code for a team if the requesting user is the creator or POC of the team. + """ + user_id = request.user_id + team = TeamRepository.get_by_id(team_id) + if not team: + return Response({"detail": "Team not found."}, status=status.HTTP_404_NOT_FOUND) + is_creator = str(team.created_by) == str(user_id) + is_poc = str(team.poc_id) == str(user_id) + if is_creator or is_poc: + return Response({"invite_code": team.invite_code}, status=status.HTTP_200_OK) + return Response( + {"detail": "You are not authorized to view the invite code for this team."}, + status=status.HTTP_403_FORBIDDEN, + ) + + +class TeamActivityTimelineView(APIView): + @extend_schema( + operation_id="get_team_activity_timeline", + summary="Get team activity timeline", + description="Return a timeline of all activities related to tasks assigned to the team, including assignment, unassignment, executor changes, and status changes. All IDs are replaced with names.", + tags=["teams"], + parameters=[ + OpenApiParameter( + name="team_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the team", + required=True, + ), + ], + responses={ + 200: OpenApiResponse( + response={ + "type": "object", + "properties": { + "timeline": { + "type": "array", + "items": {"type": "object"}, + } + }, + }, + description="Team activity timeline returned successfully", + ), + 404: OpenApiResponse(description="Team not found"), + }, + ) + def get(self, request: Request, team_id: str): + team = TeamRepository.get_by_id(team_id) + if not team: + return Response({"detail": "Team not found."}, status=status.HTTP_404_NOT_FOUND) + logs = AuditLogRepository.get_by_team_id(team_id) + # Pre-fetch team name + team_name = team.name + # Pre-fetch all user and task names needed + user_ids = set() + task_ids = set() + for log in logs: + if log.performed_by: + user_ids.add(str(log.performed_by)) + if log.spoc_id: + user_ids.add(str(log.spoc_id)) + if log.previous_executor_id: + user_ids.add(str(log.previous_executor_id)) + if log.new_executor_id: + user_ids.add(str(log.new_executor_id)) + if log.task_id: + task_ids.add(str(log.task_id)) + user_map = {str(u.id): u.name for u in UserRepository.get_by_ids(list(user_ids))} + task_map = {str(t.id): t.title for t in TaskRepository.get_by_ids(list(task_ids))} + timeline = [] + for log in logs: + entry = { + "action": log.action, + "timestamp": log.timestamp, + } + if log.task_id: + entry["task_title"] = task_map.get(str(log.task_id), str(log.task_id)) + if log.team_id: + entry["team_name"] = team_name + if log.performed_by: + entry["performed_by_name"] = user_map.get(str(log.performed_by), str(log.performed_by)) + if log.spoc_id: + entry["spoc_name"] = user_map.get(str(log.spoc_id), str(log.spoc_id)) + if log.previous_executor_id: + entry["previous_executor_name"] = user_map.get( + str(log.previous_executor_id), str(log.previous_executor_id) + ) + if log.new_executor_id: + entry["new_executor_name"] = user_map.get(str(log.new_executor_id), str(log.new_executor_id)) + if log.status_from: + entry["status_from"] = log.status_from + if log.status_to: + entry["status_to"] = log.status_to + timeline.append(entry) + return Response({"timeline": timeline}, status=status.HTTP_200_OK) + + +class RemoveTeamMemberView(APIView): + @extend_schema( + summary="Remove a user from a team", + description="Removes the specified user from the specified team.", + parameters=[ + OpenApiParameter(name="team_id", type=str, location=OpenApiParameter.PATH, description="ID of the team"), + OpenApiParameter( + name="user_id", type=str, location=OpenApiParameter.PATH, description="ID of the user to remove" + ), + ], + responses={ + 204: OpenApiResponse(description="User removed from team successfully."), + 404: OpenApiResponse(description="Team or user not found."), + 400: OpenApiResponse(description="Bad request or other error."), + }, + tags=["teams"], + ) + def delete(self, request, team_id, user_id): + print(f"DEBUG: RemoveTeamMemberView.delete called with team_id={team_id}, user_id={user_id}") + from todo.services.team_service import TeamService + + try: + # Pass the user performing the removal (request.user_id) and the user being removed (user_id) + TeamService.remove_member_from_team(user_id=user_id, team_id=team_id, removed_by_user_id=request.user_id) + return Response(status=status.HTTP_204_NO_CONTENT) + except TeamService.TeamOrUserNotFound: + return Response({"detail": "Team or user not found."}, status=status.HTTP_404_NOT_FOUND) + except Exception as e: + return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST) diff --git a/todo/views/team_creation_invite_code.py b/todo/views/team_creation_invite_code.py new file mode 100644 index 00000000..0db08042 --- /dev/null +++ b/todo/views/team_creation_invite_code.py @@ -0,0 +1,213 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework import status +from rest_framework.request import Request + +from todo.serializers.team_creation_invite_code_serializer import ( + GenerateTeamCreationInviteCodeSerializer, + VerifyTeamCreationInviteCodeSerializer, +) +from todo.services.team_creation_invite_code_service import TeamCreationInviteCodeService +from todo.repositories.team_creation_invite_code_repository import TeamCreationInviteCodeRepository +from todo.dto.team_creation_invite_code_dto import GenerateTeamCreationInviteCodeDTO, VerifyTeamCreationInviteCodeDTO +from todo.dto.responses.generate_team_creation_invite_code_response import GenerateTeamCreationInviteCodeResponse +from todo.dto.responses.get_team_creation_invite_codes_response import GetTeamCreationInviteCodesResponse +from drf_spectacular.utils import extend_schema, OpenApiResponse, OpenApiExample, OpenApiParameter, OpenApiTypes +from django.conf import settings + + +class GenerateTeamCreationInviteCodeView(APIView): + def _check_authorization(self, user_email: str = None) -> bool: + """Check if the user is authorized to access team creation invite code functionality.""" + + if user_email and user_email in getattr(settings, "ADMIN_EMAILS", []): + return True + + return False + + def _handle_validation_errors(self, errors): + """Handle validation errors.""" + return Response(data={"errors": errors}, status=status.HTTP_400_BAD_REQUEST) + + @extend_schema( + operation_id="generate_team_creation_invite_code", + summary="Generate a new team creation invite code", + description="Generate a new team creation invite code. This code can only be used once and is required for team creation. Only admins can generate these codes.", + tags=["team-creation-invite-codes"], + request=GenerateTeamCreationInviteCodeSerializer, + examples=[ + OpenApiExample( + "Generate with description", + value={"description": "Code for marketing team creation"}, + description="Generate a team creation invite code with a description", + ), + OpenApiExample( + "Generate without description", + value={}, + description="Generate a team creation invite code without description", + ), + ], + responses={ + 201: OpenApiResponse( + response=GenerateTeamCreationInviteCodeResponse, + description="Team creation invite code generated successfully", + ), + 400: OpenApiResponse(description="Bad request - validation error"), + 401: OpenApiResponse(description="Unauthorized - authentication required"), + 403: OpenApiResponse(description="Forbidden - user not authorized to generate invite codes"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def post(self, request: Request): + """ + Generate a new team creation invite code. + """ + user_email = request.user_email + + if not self._check_authorization(user_email): + return Response( + data={"message": "You are not authorized to perform this action."}, + status=status.HTTP_403_FORBIDDEN, + ) + + serializer = GenerateTeamCreationInviteCodeSerializer(data=request.data) + + if not serializer.is_valid(): + return self._handle_validation_errors(serializer.errors) + + dto = GenerateTeamCreationInviteCodeDTO(**serializer.validated_data) + created_by_user_id = request.user_id + response: GenerateTeamCreationInviteCodeResponse = TeamCreationInviteCodeService.generate_code( + dto, created_by_user_id + ) + data = response.model_dump(mode="json") + return Response(data=data, status=status.HTTP_201_CREATED) + + +class VerifyTeamCreationInviteCodeView(APIView): + @extend_schema( + operation_id="verify_team_creation_invite_code", + summary="Verify a team creation invite code", + description="Verify a team creation invite code. Returns success if the code is valid and unused.", + tags=["team-creation-invite-codes"], + request=VerifyTeamCreationInviteCodeSerializer, + examples=[ + OpenApiExample( + "Verify valid code", value={"code": "ABC123"}, description="Verify a valid team creation invite code" + ), + ], + responses={ + 200: OpenApiResponse(response=dict, description="Team creation invite code verified successfully."), + 400: OpenApiResponse(description="Bad request - invalid or already used code"), + 401: OpenApiResponse(description="Unauthorized - authentication required"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def post(self, request: Request): + """ + Verify a team creation invite code. + """ + serializer = VerifyTeamCreationInviteCodeSerializer(data=request.data) + + if not serializer.is_valid(): + return self._handle_validation_errors(serializer.errors) + + dto = VerifyTeamCreationInviteCodeDTO(**serializer.validated_data) + result = TeamCreationInviteCodeRepository.is_code_valid(dto.code) + if not result: + return Response( + data={"message": "Invalid or already used team creation invite code"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + return Response(data={"message": "Team creation invite code verified successfully"}, status=status.HTTP_200_OK) + + def _handle_validation_errors(self, errors): + """Handle validation errors.""" + return Response(data={"errors": errors}, status=status.HTTP_400_BAD_REQUEST) + + +class ListTeamCreationInviteCodesView(APIView): + def _check_authorization(self, user_email: str = None) -> bool: + """Check if the user is authorized to access team creation invite code functionality.""" + + if user_email and user_email in getattr(settings, "ADMIN_EMAILS", []): + return True + + return False + + @extend_schema( + operation_id="list_team_creation_invite_codes", + summary="List team creation invite codes with pagination", + description="Get paginated team creation invite codes with their details including user information for created_by and used_by. Only authorized users can access this endpoint. Default: 10 items per page.", + tags=["team-creation-invite-codes"], + parameters=[ + OpenApiParameter( + name="page", + location=OpenApiParameter.QUERY, + description="Page number (default: 1)", + required=False, + type=OpenApiTypes.INT, + ), + OpenApiParameter( + name="limit", + location=OpenApiParameter.QUERY, + description="Number of items per page (default: 10, max: 50)", + required=False, + type=OpenApiTypes.INT, + ), + ], + examples=[ + OpenApiExample("Default pagination", value={}, description="Get first 10 items (default)"), + OpenApiExample("Custom pagination", value={"page": 2, "limit": 5}, description="Get 5 items from page 2"), + OpenApiExample("Large page size", value={"limit": 20}, description="Get first 20 items"), + ], + responses={ + 200: OpenApiResponse( + response=GetTeamCreationInviteCodesResponse, + description="Team creation invite codes retrieved successfully", + ), + 400: OpenApiResponse(description="Bad request - invalid query parameters"), + 401: OpenApiResponse(description="Unauthorized - authentication required"), + 403: OpenApiResponse(description="Forbidden - user not authorized to view invite codes"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request): + """ + Get paginated team creation invite codes with user details. + """ + user_email = request.user_email + + if not self._check_authorization(user_email): + return Response( + data={"message": "You are not authorized to perform this action."}, + status=status.HTTP_403_FORBIDDEN, + ) + + try: + page = int(request.query_params.get("page", 1)) + limit = int(request.query_params.get("limit", 10)) + + if page < 1: + page = 1 + if limit < 1 or limit > 50: + limit = 10 + + base_url = "/team-invite-codes" + + response: GetTeamCreationInviteCodesResponse = TeamCreationInviteCodeService.get_all_codes( + page, limit, base_url + ) + data = response.model_dump(mode="json") + return Response(data=data, status=status.HTTP_200_OK) + except ValueError as e: + return Response( + data={"message": f"Invalid query parameters: {str(e)}"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + return Response( + data={"message": f"Failed to retrieve team creation invite codes: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) diff --git a/todo/views/user.py b/todo/views/user.py new file mode 100644 index 00000000..d866d91d --- /dev/null +++ b/todo/views/user.py @@ -0,0 +1,119 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.request import Request +from todo.constants.messages import ApiErrors +from todo.services.user_service import UserService +from rest_framework import status +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from todo.dto.user_dto import UserSearchResponseDTO, UsersDTO +from todo.dto.responses.error_response import ApiErrorResponse + + +class UsersView(APIView): + @extend_schema( + operation_id="get_users", + summary="Get users with search and pagination", + description="Get user profile details or search users with fuzzy search. " + "Use 'profile=true' to get current user details, or use search parameter to find users.", + tags=["users"], + parameters=[ + OpenApiParameter( + name="profile", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Set to 'true' to get current user profile", + required=False, + ), + OpenApiParameter( + name="search", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Search query for name or email (fuzzy search)", + required=False, + ), + OpenApiParameter( + name="page", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Page number for pagination (default: 1)", + required=False, + ), + OpenApiParameter( + name="limit", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Number of results per page (default: 10, max: 100)", + required=False, + ), + ], + responses={ + 200: UserSearchResponseDTO, + 204: OpenApiResponse(description="No users found"), + 401: ApiErrorResponse, + 400: OpenApiResponse(description="Bad request - invalid parameters"), + 404: OpenApiResponse(description="Route does not exist"), + 500: OpenApiResponse(description="Internal server error"), + }, + ) + def get(self, request: Request): + profile = request.query_params.get("profile") + if profile == "true": + userData = UserService.get_user_by_id(request.user_id) + if not userData: + return Response( + { + "statusCode": 404, + "message": ApiErrors.USER_NOT_FOUND, + "data": None, + }, + status=404, + ) + userData = userData.model_dump(mode="json", exclude_none=True) + userResponse = { + "id": userData["id"], + "email": userData["email_id"], + "name": userData.get("name"), + "picture": userData.get("picture"), + } + return Response( + { + "message": "Current user details fetched successfully", + "data": userResponse, + }, + status=200, + ) + + # Handle search functionality + search = request.query_params.get("search", "").strip() + page = int(request.query_params.get("page", 1)) + limit = int(request.query_params.get("limit", 10)) + + # If no search parameter provided, return 404 + if search: + users, total_count = UserService.search_users(search, page, limit) + else: + users, total_count = UserService.get_all_users(page, limit) + + user_dtos = [ + UsersDTO( + id=str(user.id), + name=user.name, + ) + for user in users + ] + + response_data = UserSearchResponseDTO( + users=user_dtos, + total_count=total_count, + page=page, + limit=limit, + ) + + return Response( + { + "message": "Users fetched successfully", + "data": response_data.model_dump(), + }, + status=status.HTTP_200_OK, + ) diff --git a/todo/views/user_role.py b/todo/views/user_role.py new file mode 100644 index 00000000..1f4add90 --- /dev/null +++ b/todo/views/user_role.py @@ -0,0 +1,94 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework import status +from drf_spectacular.utils import extend_schema, OpenApiExample + +from todo.services.user_role_service import UserRoleService + + +class UserRoleListView(APIView): + def get(self, request, user_id: str): + scope = request.query_params.get("scope") + user_roles = UserRoleService.get_user_roles(user_id, scope) + + return Response({"user_id": user_id, "roles": user_roles, "total": len(user_roles)}) + + +class TeamUserRoleListView(APIView): + def get(self, request, team_id: str): + team_users = UserRoleService.get_team_users_with_roles(team_id) + return Response({"team_id": team_id, "users": team_users, "total": len(team_users)}) + + +class TeamUserRoleDetailView(APIView): + def get(self, request, team_id: str, user_id: str): + user_roles = UserRoleService.get_user_roles(user_id, "TEAM", team_id) + return Response({"team_id": team_id, "user_id": user_id, "roles": user_roles}) + + @extend_schema( + request={ + "application/json": { + "type": "object", + "properties": {"role_name": {"type": "string"}}, + "required": ["role_name"], + "example": {"role_name": "admin"}, + } + }, + examples=[ + OpenApiExample("Assign Role Example", value={"role_name": "admin"}, request_only=True, response_only=False) + ], + ) + def post(self, request, team_id: str, user_id: str): + role_name = request.data.get("role_name") + if not role_name: + return Response({"error": "role_name is required"}, status=status.HTTP_400_BAD_REQUEST) + + success = UserRoleService.assign_role(user_id, role_name, "TEAM", team_id) + + if success: + return Response( + { + "message": f"Role '{role_name}' assigned to user {user_id}", + "team_id": team_id, + "user_id": user_id, + "role_name": role_name, + } + ) + else: + return Response({"error": "Failed to assign role"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + +class TeamUserRoleDeleteView(APIView): + @extend_schema( + parameters=[ + { + "name": "role_id", + "in": "path", + "required": True, + "description": "The ID of the role to remove", + "schema": {"type": "string"}, + } + ], + examples=[ + OpenApiExample( + "Remove Role Example", + value={"role_id": "60d5f9f8f8f8f8f8f8f8f8f8"}, + request_only=True, + response_only=False, + ) + ], + ) + def delete(self, request, team_id: str, user_id: str, role_id: str): + success = UserRoleService.remove_role_by_id(user_id, role_id, "TEAM", team_id) + + if success: + return Response( + { + "message": f"Role with ID '{role_id}' removed from user {user_id}", + "team_id": team_id, + "user_id": user_id, + "role_id": role_id, + } + ) + else: + return Response({"message": f"Role with ID '{role_id}' not found for user {user_id}"}) diff --git a/todo/views/watchlist.py b/todo/views/watchlist.py new file mode 100644 index 00000000..d0413413 --- /dev/null +++ b/todo/views/watchlist.py @@ -0,0 +1,179 @@ +from rest_framework.views import APIView +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework import status +from bson import ObjectId +from todo.middlewares.jwt_auth import get_current_user_info +from todo.constants.messages import ApiErrors +from todo.serializers.update_watchlist_serializer import UpdateWatchlistSerializer +from todo.services.watchlist_service import WatchlistService +from todo.serializers.create_watchlist_serializer import CreateWatchlistSerializer +from todo.serializers.get_watchlist_tasks_serializer import GetWatchlistTaskQueryParamsSerializer +from todo.dto.responses.error_response import ApiErrorResponse +from todo.dto.watchlist_dto import CreateWatchlistDTO +from todo.dto.responses.create_watchlist_response import CreateWatchlistResponse +from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiResponse +from drf_spectacular.types import OpenApiTypes +from todo.dto.responses.get_watchlist_task_response import GetWatchlistTasksResponse +from todo.repositories.watchlist_repository import WatchlistRepository + + +class WatchlistListView(APIView): + @extend_schema( + operation_id="get_watchlist_tasks", + summary="Get paginated list of watchlisted tasks", + description="Retrieve a paginated list of tasks that are added to the authenticated user's watchlist. Each task includes assignee details showing who the task belongs to (who is responsible for completing the task).", + tags=["watchlist"], + parameters=[ + OpenApiParameter( + name="page", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Page number for pagination (default: 1)", + required=False, + ), + OpenApiParameter( + name="limit", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Number of tasks per page (default: 10, max: 100)", + required=False, + ), + ], + responses={ + 200: OpenApiResponse( + response=GetWatchlistTasksResponse, + description="Paginated list of watchlisted tasks with assignee details (task ownership) returned successfully", + ), + 400: OpenApiResponse(response=ApiErrorResponse, description="Bad request - validation error"), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def get(self, request: Request): + """ + Retrieve a paginated list of tasks that are added to watchlist. + """ + query = GetWatchlistTaskQueryParamsSerializer(data=request.query_params) + query.is_valid(raise_exception=True) + + user = get_current_user_info(request) + + response = WatchlistService.get_watchlisted_tasks( + page=query.validated_data["page"], + limit=query.validated_data["limit"], + user_id=user["user_id"], + ) + return Response(data=response.model_dump(mode="json"), status=status.HTTP_200_OK) + + @extend_schema( + operation_id="add_task_to_watchlist", + summary="Add a task to the watchlist", + description="Add a task to the authenticated user's watchlist.", + tags=["watchlist"], + request=CreateWatchlistSerializer, + responses={ + 201: OpenApiResponse(response=CreateWatchlistResponse, description="Task added to watchlist successfully"), + 400: OpenApiResponse( + response=ApiErrorResponse, description="Bad request - validation error or already in watchlist" + ), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def post(self, request: Request): + """ + Add a task to the watchlist. + """ + user = get_current_user_info(request) + + serializer = CreateWatchlistSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + try: + dto = CreateWatchlistDTO(**serializer.validated_data, userId=user["user_id"], createdBy=user["user_id"]) + response: CreateWatchlistResponse = WatchlistService.add_task(dto) + return Response(data=response.model_dump(mode="json"), status=status.HTTP_201_CREATED) + + except ValueError as e: + if isinstance(e.args[0], ApiErrorResponse): + error_response = e.args[0] + return Response(data=error_response.model_dump(mode="json"), status=error_response.statusCode) + + fallback_response = ApiErrorResponse( + statusCode=500, + message=ApiErrors.UNEXPECTED_ERROR_OCCURRED, + errors=[{"detail": ApiErrors.INTERNAL_SERVER_ERROR}], + ) + return Response( + data=fallback_response.model_dump(mode="json"), status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + +class WatchlistDetailView(APIView): + @extend_schema( + operation_id="update_watchlist_task", + summary="Update watchlist status of a task", + description="Update the isActive status of a task in the authenticated user's watchlist. This allows users to activate or deactivate watching a specific task.", + tags=["watchlist"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + description="Unique identifier of the task to update in the watchlist", + required=True, + ), + ], + request=UpdateWatchlistSerializer, + responses={ + 200: OpenApiResponse(description="Watchlist task status updated successfully"), + 400: OpenApiResponse(response=ApiErrorResponse, description="Bad request - validation error"), + 404: OpenApiResponse(response=ApiErrorResponse, description="Task not found in watchlist"), + 500: OpenApiResponse(response=ApiErrorResponse, description="Internal server error"), + }, + ) + def patch(self, request: Request, task_id: str): + """ + Update the watchlist status of a task. + """ + user = get_current_user_info(request) + task_id = ObjectId(task_id) + serializer = UpdateWatchlistSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + WatchlistService.update_task(task_id, serializer.validated_data, ObjectId(user["user_id"])) + return Response(status=status.HTTP_200_OK) + + +class WatchlistCheckView(APIView): + @extend_schema( + operation_id="check_task_in_watchlist", + summary="Check if a task is in the user's watchlist", + description="Returns the watchlist status for the given task_id: true if actively watched, false if in watchlist but inactive, or null if not in watchlist.", + tags=["watchlist"], + parameters=[ + OpenApiParameter( + name="task_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Task ID to check", + required=True, + ), + ], + responses={ + 200: OpenApiResponse(response=None, description="Returns { 'in_watchlist': true/false/null }"), + 400: OpenApiResponse(response=ApiErrorResponse, description="Bad request - validation error"), + 401: OpenApiResponse(response=ApiErrorResponse, description="Unauthorized"), + }, + ) + def get(self, request: Request): + user = get_current_user_info(request) + task_id = request.query_params.get("task_id") + if not task_id: + return Response({"message": "task_id is required"}, status=status.HTTP_400_BAD_REQUEST) + if not ObjectId.is_valid(task_id): + return Response({"message": "Invalid task_id"}, status=status.HTTP_400_BAD_REQUEST) + in_watchlist = None + watchlist_entry = WatchlistRepository.get_by_user_and_task(user["user_id"], task_id) + if watchlist_entry: + in_watchlist = watchlist_entry.isActive + return Response({"in_watchlist": in_watchlist}, status=status.HTTP_200_OK) diff --git a/todo_project/__init__.py b/todo_project/__init__.py index 84a4d93e..25c41747 100644 --- a/todo_project/__init__.py +++ b/todo_project/__init__.py @@ -1 +1 @@ -# Added this because without this file Django isn't able to auto detect the test files +# Django project initialization diff --git a/todo_project/db/config.py b/todo_project/db/config.py index f7ade230..b80e96c4 100644 --- a/todo_project/db/config.py +++ b/todo_project/db/config.py @@ -18,7 +18,7 @@ def __new__(cls, *args, **kwargs): def _get_database_client(self): if self._database_client is None: - self._database_client = MongoClient(settings.MONGODB_URI) + self._database_client = MongoClient(settings.MONGODB_URI, tz_aware=True) return self._database_client def get_database(self): @@ -39,3 +39,9 @@ def check_database_health(self): except ConnectionFailure as e: logger.error(f"Failed to establish database connection: {e}") return False + + @classmethod + def reset(cls): + if cls.__instance is not None and cls.__instance._database_client is not None: + cls.__instance._database_client.close() + cls.__instance = None diff --git a/todo_project/db/init.py b/todo_project/db/init.py index ca27b3a9..63ceb2e2 100644 --- a/todo_project/db/init.py +++ b/todo_project/db/init.py @@ -1,6 +1,8 @@ import logging import time from todo_project.db.config import DatabaseManager +from todo_project.db.migrations import run_all_migrations +from todo.services.postgres_sync_service import PostgresSyncService logger = logging.getLogger(__name__) @@ -17,7 +19,7 @@ def initialize_database(max_retries=5, retry_delay=2): if not db_manager.check_database_health(): if attempt < max_retries - 1: logger.warning( - f"Database health check failed, attempt {attempt+1}. Retrying in {retry_delay} seconds..." + f"Database health check failed, attempt {attempt + 1}. Retrying in {retry_delay} seconds..." ) time.sleep(retry_delay) continue @@ -44,6 +46,21 @@ def initialize_database(max_retries=5, retry_delay=2): else: logger.info(f"taskDisplayId counter already exists with value {task_counter['seq']}") + # Run database migrations + migrations_success = run_all_migrations() + if not migrations_success: + logger.warning("Some database migrations failed, but continuing with initialization") + + try: + postgres_sync_service = PostgresSyncService() + postgres_sync_success = postgres_sync_service.sync_all_tables() + if not postgres_sync_success: + logger.warning("Some PostgreSQL table synchronizations failed, but continuing with initialization") + else: + logger.info("PostgreSQL table synchronization completed successfully") + except Exception as e: + logger.warning(f"PostgreSQL table synchronization failed: {str(e)}, but continuing with initialization") + logger.info("Database initialization completed successfully") return True except Exception as e: diff --git a/todo_project/db/migrations.py b/todo_project/db/migrations.py new file mode 100644 index 00000000..714625ee --- /dev/null +++ b/todo_project/db/migrations.py @@ -0,0 +1,239 @@ +import logging +from datetime import datetime, timezone +from typing import List, Dict, Any +from todo_project.db.config import DatabaseManager +from todo.models.label import LabelModel +from todo.models.role import RoleModel +from todo.constants.role import RoleName, RoleScope + +logger = logging.getLogger(__name__) + + +def migrate_fixed_labels() -> bool: + """ + Migration to add fixed labels to the system. + This migration is idempotent and can be run multiple times safely. + + Labels to be added: + 1. Feature + 2. Bug + 3. Refactoring/Optimization + 4. API + 5. UI/UX + 6. Testing + 7. Documentation + 8. Review + + Returns: + bool: True if migration completed successfully, False otherwise + """ + logger.info("Starting fixed labels migration") + + fixed_labels: List[Dict[str, Any]] = [ + { + "name": "Feature", + "color": "#22c55e", + "description": "New feature implementation", + }, + { + "name": "Bug", + "color": "#ef4444", + "description": "Bug fixes and error corrections", + }, + { + "name": "Refactoring/Optimization", + "color": "#f59e0b", + "description": "Code refactoring and performance optimization", + }, + { + "name": "API", + "color": "#3b82f6", + "description": "API development and integration", + }, + { + "name": "UI/UX", + "color": "#8b5cf6", + "description": "User interface and user experience improvements", + }, + { + "name": "Testing", + "color": "#06b6d4", + "description": "Testing and quality assurance", + }, + { + "name": "Documentation", + "color": "#64748b", + "description": "Documentation and guides", + }, + { + "name": "Review", + "color": "#ec4899", + "description": "Code review and peer review tasks", + }, + ] + + try: + db_manager = DatabaseManager() + labels_collection = db_manager.get_collection("labels") + + current_time = datetime.now(timezone.utc) + created_count = 0 + skipped_count = 0 + + for label_data in fixed_labels: + try: + existing_label = labels_collection.find_one( + {"name": {"$regex": f"^{label_data['name']}$", "$options": "i"}, "isDeleted": {"$ne": True}} + ) + + if existing_label: + logger.info(f"Label '{label_data['name']}' already exists, skipping") + skipped_count += 1 + continue + + label_document = { + "name": label_data["name"], + "color": label_data["color"], + "description": label_data["description"], + "isDeleted": False, + "createdAt": current_time, + "updatedAt": None, + "createdBy": "system", + "updatedBy": None, + } + + try: + LabelModel(**label_document) + except Exception as validation_error: + logger.error(f"Label validation failed for '{label_data['name']}': {validation_error}") + continue + + result = labels_collection.insert_one(label_document) + + if result.inserted_id: + logger.info(f"Successfully created label '{label_data['name']}' with ID: {result.inserted_id}") + created_count += 1 + else: + logger.error(f"Failed to create label '{label_data['name']}' - no ID returned") + + except Exception as e: + logger.error(f"Error processing label '{label_data['name']}': {str(e)}") + continue + + total_labels = len(fixed_labels) + logger.info( + f"Fixed labels migration completed - {created_count} created, {skipped_count} skipped, {total_labels} total" + ) + + return True + + except Exception as e: + logger.error(f"Fixed labels migration failed: {str(e)}") + return False + + +def migrate_predefined_roles() -> bool: + """Migration to add predefined roles to the system.""" + logger.info("Starting predefined roles migration") + + predefined_roles = [ + { + "name": RoleName.MODERATOR.value, + "scope": RoleScope.GLOBAL.value, + "description": "Global system moderator", + "is_active": True, + }, + { + "name": RoleName.OWNER.value, + "scope": RoleScope.TEAM.value, + "description": "Team owner with full privileges", + "is_active": True, + }, + { + "name": RoleName.ADMIN.value, + "scope": RoleScope.TEAM.value, + "description": "Team administrator", + "is_active": True, + }, + {"name": RoleName.MEMBER.value, "scope": RoleScope.TEAM.value, "description": "Team member", "is_active": True}, + ] + + try: + db_manager = DatabaseManager() + roles_collection = db_manager.get_collection("roles") + + current_time = datetime.now(timezone.utc) + created_count = 0 + skipped_count = 0 + + for role_data in predefined_roles: + existing = roles_collection.find_one( + {"name": {"$regex": f"^{role_data['name']}$", "$options": "i"}, "scope": role_data["scope"]} + ) + + if existing: + logger.info(f"Role '{role_data['name']}' ({role_data['scope']}) already exists, skipping") + skipped_count += 1 + continue + + try: + role_doc = { + "name": role_data["name"], + "scope": role_data["scope"], + "description": role_data["description"], + "is_active": role_data["is_active"], + "created_at": current_time, + "created_by": "system", + } + + validated_role = RoleModel(**role_doc) + validated_doc = validated_role.model_dump(mode="json", by_alias=True, exclude_none=True) + + result = roles_collection.insert_one(validated_doc) + if result.inserted_id: + logger.info(f"Created role: {role_data['name']} ({role_data['scope']})") + created_count += 1 + + except Exception as validation_error: + logger.error(f"Validation failed for role '{role_data['name']}': {validation_error}") + continue + + logger.info(f"Roles migration completed - {created_count} created, {skipped_count} skipped") + return True + + except Exception as e: + logger.error(f"Roles migration failed: {str(e)}") + return False + + +def run_all_migrations() -> bool: + """ + Run all database migrations. + + Returns: + bool: True if all migrations completed successfully, False otherwise + """ + logger.info("Starting database migrations") + + migrations = [ + ("Fixed Labels Migration", migrate_fixed_labels), + ("Predefined Roles Migration", migrate_predefined_roles), + ] + + success_count = 0 + + for migration_name, migration_func in migrations: + try: + logger.info(f"Running {migration_name}") + if migration_func(): + logger.info(f"{migration_name} completed successfully") + success_count += 1 + else: + logger.error(f"{migration_name} failed") + except Exception as e: + logger.error(f"{migration_name} failed with exception: {str(e)}") + + total_migrations = len(migrations) + logger.info(f"Database migrations completed - {success_count}/{total_migrations} successful") + + return success_count == total_migrations diff --git a/todo_project/settings/base.py b/todo_project/settings/base.py index dd73a406..49281c06 100644 --- a/todo_project/settings/base.py +++ b/todo_project/settings/base.py @@ -1,4 +1,5 @@ import os +import sys from pathlib import Path # Build paths inside the project like this: BASE_DIR / 'subdir'. @@ -13,28 +14,61 @@ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True -ALLOWED_HOSTS = [] +ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "").split(",") if os.getenv("ALLOWED_HOSTS") else [] MONGODB_URI = os.getenv("MONGODB_URI") DB_NAME = os.getenv("DB_NAME") -# Application definition + +# Postgres Configuration +POSTGRES_HOST = os.getenv("POSTGRES_HOST", "localhost") +POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432") +POSTGRES_DB = os.getenv("POSTGRES_DB", "todo_postgres") +POSTGRES_USER = os.getenv("POSTGRES_USER", "todo_user") +POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "todo_password") INSTALLED_APPS = [ + "django.contrib.staticfiles", + "corsheaders", "rest_framework", + "drf_spectacular", "todo", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.admin", ] MIDDLEWARE = [ + "corsheaders.middleware.CorsMiddleware", "django.middleware.security.SecurityMiddleware", - "django.middleware.common.CommonMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.common.CommonMiddleware", + "todo.middlewares.jwt_auth.JWTAuthenticationMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", ] ROOT_URLCONF = "todo_project.urls" - WSGI_APPLICATION = "todo_project.wsgi.application" +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] LANGUAGE_CODE = "en-us" @@ -44,6 +78,21 @@ USE_TZ = True +SESSION_ENGINE = "django.contrib.sessions.backends.cache" +SESSION_CACHE_ALIAS = "default" +SESSION_COOKIE_AGE = 3600 +SESSION_SAVE_EVERY_REQUEST = False + +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "oauth-sessions", + } +} + +ADMIN_EMAILS = os.getenv("ADMIN_EMAILS", "").split(",") + + REST_FRAMEWORK = { "DEFAULT_RENDERER_CLASSES": [ "rest_framework.renderers.JSONRenderer", @@ -54,4 +103,163 @@ "DEFAULT_PAGE_LIMIT": 20, "MAX_PAGE_LIMIT": 200, }, + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework.authentication.SessionAuthentication", + "rest_framework.authentication.BasicAuthentication", + ], + "DEFAULT_PERMISSION_CLASSES": [ + "rest_framework.permissions.AllowAny", + ], + "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema", +} + +# APPEND_SLASH = False # Fix the routing issue with trailing slashes and then uncomment this line + +GOOGLE_OAUTH = { + "CLIENT_ID": os.getenv("GOOGLE_OAUTH_CLIENT_ID"), + "CLIENT_SECRET": os.getenv("GOOGLE_OAUTH_CLIENT_SECRET"), + "REDIRECT_URI": os.getenv("GOOGLE_OAUTH_REDIRECT_URI"), } + +TESTING = "test" in sys.argv or "pytest" in sys.modules or os.getenv("TESTING") == "True" + +if TESTING: + # Test JWT configuration (HS256 - simpler for tests) + JWT_CONFIG = { + "ALGORITHM": "HS256", + "PRIVATE_KEY": "test-secret-key-for-jwt-signing-very-long-key-needed-for-security", + "PUBLIC_KEY": "test-secret-key-for-jwt-signing-very-long-key-needed-for-security", + "ACCESS_TOKEN_LIFETIME": int(os.getenv("ACCESS_LIFETIME", "3600")), + "REFRESH_TOKEN_LIFETIME": int(os.getenv("REFRESH_LIFETIME", "604800")), + } +else: + JWT_CONFIG = { + "ALGORITHM": "RS256", + "PRIVATE_KEY": os.getenv("PRIVATE_KEY"), + "PUBLIC_KEY": os.getenv("PUBLIC_KEY"), + "ACCESS_TOKEN_LIFETIME": int(os.getenv("ACCESS_LIFETIME", "3600")), + "REFRESH_TOKEN_LIFETIME": int(os.getenv("REFRESH_TOKEN_LIFETIME", "604800")), + } + +COOKIE_SETTINGS = { + "ACCESS_COOKIE_NAME": os.getenv("ACCESS_TOKEN_COOKIE_NAME", "todo-access"), + "REFRESH_COOKIE_NAME": os.getenv("REFRESH_TOKEN_COOKIE_NAME", "todo-refresh"), + "COOKIE_DOMAIN": os.getenv("COOKIE_DOMAIN", "localhost"), + "COOKIE_SECURE": os.getenv("COOKIE_SECURE", "True").lower() == "true", + "COOKIE_HTTPONLY": os.getenv("COOKIE_HTTPONLY", "True").lower() == "true", + "COOKIE_SAMESITE": os.getenv("COOKIE_SAMESITE", "Strict"), + "COOKIE_PATH": "/", +} + +SERVICES = { + "TODO_UI": { + "URL": os.getenv("TODO_UI_BASE_URL", "http://localhost:3000"), + "REDIRECT_PATH": os.getenv("TODO_UI_REDIRECT_PATH", "dashboard"), + }, + "TODO_BACKEND": { + "URL": os.getenv("TODO_BACKEND_BASE_URL", "http://localhost:8000"), + }, +} + +# Database Configuration +# Only configure PostgreSQL if not in testing mode +if not TESTING: + DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": POSTGRES_DB, + "USER": POSTGRES_USER, + "PASSWORD": POSTGRES_PASSWORD, + "HOST": POSTGRES_HOST, + "PORT": POSTGRES_PORT, + "OPTIONS": { + "sslmode": "prefer", + }, + } + } +else: + DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + } + } + +# Dual-Write Configuration +DUAL_WRITE_ENABLED = os.getenv("DUAL_WRITE_ENABLED", "True").lower() == "true" +DUAL_WRITE_RETRY_ATTEMPTS = int(os.getenv("DUAL_WRITE_RETRY_ATTEMPTS", "3")) +DUAL_WRITE_RETRY_DELAY = int(os.getenv("DUAL_WRITE_RETRY_DELAY", "5")) # seconds + +PUBLIC_PATHS = [ + "/favicon.ico", + "/v1/health", + "/api/docs", + "/api/docs/", + "/api/schema", + "/api/schema/", + "/api/redoc", + "/api/redoc/", + "/static/", + "/v1/auth/google/login", + "/v1/auth/google/callback", + "/v1/auth/logout", + "/v1/auth/google/status", + "/v1/auth/google/refresh", +] + +# Swagger/OpenAPI Configuration +SPECTACULAR_SETTINGS = { + "TITLE": "Todo API", + "DESCRIPTION": "A comprehensive Todo API with authentication and task management", + "VERSION": "1.0.0", + "SERVE_INCLUDE_SCHEMA": False, + "COMPONENT_SPLIT_REQUEST": True, + "SCHEMA_PATH_PREFIX": "/v1/", + "SWAGGER_UI_SETTINGS": { + "url": os.getenv("SWAGGER_UI_PATH", "/api/schema"), + }, + "SERVERS": [ + { + "url": f"{SERVICES.get('TODO_BACKEND').get('URL')}", + "description": "Development server", + }, + ], + "TAGS": [ + {"name": "tasks", "description": "Task management operations"}, + {"name": "auth", "description": "Authentication operations"}, + {"name": "health", "description": "Health check endpoints"}, + ], + "CONTACT": { + "name": "API Support", + "email": "support@example.com", + }, + "LICENSE": { + "name": "MIT License", + "url": "https://opensource.org/licenses/MIT", + }, + "EXTERNAL_DOCS": { + "description": "Find more info here", + "url": "https://github.com/your-repo/todo-backend", + }, +} + +STATIC_URL = "/static/" + +CORS_ALLOWED_ORIGINS = os.getenv("CORS_ALLOWED_ORIGINS").split(",") +CORS_ALLOW_CREDENTIALS = True +CORS_ALLOWED_HEADERS = [ + "accept", + "accept-encoding", + "authorization", + "content-type", + "dnt", + "origin", + "user-agent", + "x-csrftoken", + "x-requested-with", +] + +CSRF_COOKIE_SECURE = True + +SESSION_COOKIE_SECURE = True +SESSION_COOKIE_SAMESITE = "Lax" diff --git a/todo_project/settings/configure.py b/todo_project/settings/configure.py index 91765314..85352e68 100644 --- a/todo_project/settings/configure.py +++ b/todo_project/settings/configure.py @@ -6,8 +6,10 @@ ENV_VAR_NAME = "ENV" PRODUCTION = "PRODUCTION" DEVELOPMENT = "DEVELOPMENT" +STAGING = "STAGING" PRODUCTION_SETTINGS = "todo_project.settings.production" DEVELOPMENT_SETTINGS = "todo_project.settings.development" +STAGING_SETTINGS = "todo_project.settings.staging" DEFAULT_SETTINGS = DEVELOPMENT_SETTINGS @@ -18,5 +20,7 @@ def configure_settings_module(): if env == PRODUCTION: django_settings_module = PRODUCTION_SETTINGS + elif env == STAGING: + django_settings_module = STAGING_SETTINGS os.environ.setdefault("DJANGO_SETTINGS_MODULE", django_settings_module) diff --git a/todo_project/settings/development.py b/todo_project/settings/development.py index aa4ee915..59fcbdcb 100644 --- a/todo_project/settings/development.py +++ b/todo_project/settings/development.py @@ -1,2 +1,4 @@ -# Add settings for development environment here -from .base import * # noqa: F403 +# Development specific settings +from .base import * + +DEBUG = True diff --git a/todo_project/settings/production.py b/todo_project/settings/production.py index dc05bc46..42e5eac3 100644 --- a/todo_project/settings/production.py +++ b/todo_project/settings/production.py @@ -1,6 +1,3 @@ -from .base import * # noqa: F403 -import os +from .base import * DEBUG = False - -ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS").split(",") diff --git a/todo_project/settings/staging.py b/todo_project/settings/staging.py new file mode 100644 index 00000000..3ac92deb --- /dev/null +++ b/todo_project/settings/staging.py @@ -0,0 +1,3 @@ +from .base import * + +DEBUG = True diff --git a/todo_project/settings/test.py b/todo_project/settings/test.py new file mode 100644 index 00000000..8fd9be81 --- /dev/null +++ b/todo_project/settings/test.py @@ -0,0 +1,10 @@ +from .base import * + +DUAL_WRITE_ENABLED = False + +# Remove PostgreSQL database configuration for tests +# This prevents Django from trying to connect to PostgreSQL +DATABASES = {} + +# Use MongoDB only for tests +# The tests will use testcontainers to spin up their own MongoDB instance diff --git a/todo_project/tests/unit/test_database_manager.py b/todo_project/tests/unit/test_database_manager.py index 2c739693..e5411ade 100644 --- a/todo_project/tests/unit/test_database_manager.py +++ b/todo_project/tests/unit/test_database_manager.py @@ -27,7 +27,7 @@ def test_initializes_db_client_on_first_call(self, mock_mongo_client): mock_mongo_client.return_value = mock_client_instance db_client = self.database_manager._get_database_client() - mock_mongo_client.assert_called_once_with(settings.MONGODB_URI) + mock_mongo_client.assert_called_once_with(settings.MONGODB_URI, tz_aware=True) self.assertIs(db_client, mock_client_instance) diff --git a/todo_project/urls.py b/todo_project/urls.py index e3c7181c..7a6dee99 100644 --- a/todo_project/urls.py +++ b/todo_project/urls.py @@ -1,5 +1,10 @@ from django.urls import path, include +from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView, SpectacularRedocView urlpatterns = [ path("v1/", include("todo.urls"), name="api"), + # Swagger/OpenAPI endpoints + path("api/schema", SpectacularAPIView.as_view(), name="schema"), + path("api/docs", SpectacularSwaggerView.as_view(url_name="schema"), name="swagger-ui"), + path("api/redoc", SpectacularRedocView.as_view(url_name="schema"), name="redoc"), ]