diff --git a/.codecov.yml b/.codecov.yml deleted file mode 100644 index 23c7dd0..0000000 --- a/.codecov.yml +++ /dev/null @@ -1,30 +0,0 @@ -codecov: - require_ci_to_pass: yes - -coverage: - precision: 2 - round: down - range: "70...100" - status: - project: - default: - target: 70% - threshold: 0% - patch: yes - changes: yes - -ignore: - - "cmd/export_trakt/**/*" # Exclude main package from coverage - -parsers: - gcov: - branch_detection: - conditional: yes - loop: yes - method: no - macro: no - -comment: - layout: "reach,diff,flags,files,footer" - behavior: default - require_changes: no diff --git a/.dockerignore b/.dockerignore index 3037947..e877ecd 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,53 +1,7 @@ -# Git and GitHub -.git/ -.github/ +.git .gitignore - -# Development and CI files -.vscode/ -.idea/ -*.swp -*.swo -.env -.env.example -docker-compose*.yml -.dockerignore -*.md -!README.md -!LICENSE - -# Build artifacts -bin/ -coverage* -*.out -*.test -*.prof -__debug_bin* -tmp/ - -# Generated data -exports/ -logs/ -config/ -!config/config.example.toml -*.csv -*.json - -# Node.js files (for old version) -node_modules/ -npm-debug.log - -# Go specific -vendor/ -dist/ - -# Test directories -tests/ - -# Documentation -docs/ - .DS_Store +logs/ backup/ copy/ brain_ops/ @@ -55,4 +9,6 @@ TEMP/ *.tar.gz letterboxd_import.csv watched_*.csv -watchlist_*.csv \ No newline at end of file +watchlist_*.csv +README.md +LICENSE \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/beta_feedback.md b/.github/ISSUE_TEMPLATE/beta_feedback.md deleted file mode 100644 index c00476a..0000000 --- a/.github/ISSUE_TEMPLATE/beta_feedback.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -name: Beta Feedback -about: Provide feedback on the beta version of Export_Trakt_4_Letterboxd 2.0 -title: "[Beta Feedback] " -labels: "beta-feedback, go" -assignees: "JohanDevl" ---- - -## Beta Feedback - -Thank you for trying the beta version of Export_Trakt_4_Letterboxd 2.0! - -### Version Information - -- Beta version: -- Platform: -- Architecture: - -### What Worked Well - - - -### Issues Encountered - - - -### Feature Requests - - - -### Performance - - - -### Documentation - - - -### Installation Experience - - - -### Additional Comments - - - -### Logs (if applicable) - - - -Paste logs here or attach log files diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index ba889ac..2170bb5 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,57 +1,35 @@ --- -name: Bug Report +name: Bug report about: Create a report to help us improve -title: "[Bug] " -labels: "bug" +title: "[BUG] " +labels: bug assignees: "" --- -## Bug Report +**Describe the bug** +A clear and concise description of what the bug is. -### Description +**To Reproduce** +Steps to reproduce the behavior: - +1. Go to '...' +2. Run command '....' +3. See error -### Version Information +**Expected behavior** +A clear and concise description of what you expected to happen. -- Version: -- Platform: -- Architecture: +**Screenshots** +If applicable, add screenshots to help explain your problem. -### Steps to Reproduce +**Environment (please complete the following information):** -1. -2. -3. +- OS: [e.g. Ubuntu 20.04, macOS 12.0] +- Docker version (if applicable): [e.g. 20.10.12] +- Script version: [e.g. commit hash or version tag] -### Expected Behavior +**Logs** +Please include relevant logs from the `logs` directory if applicable. 
- - -### Actual Behavior - - - -### Screenshots - - - -### Configuration File - - - -```toml -# Paste your config here -``` - -### Logs - - - -``` -Paste logs here or attach log files -``` - -### Additional Context - - +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 7bdeb06..b68e1b0 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,33 +1,22 @@ --- -name: Feature Request +name: Feature request about: Suggest an idea for this project -title: "[Feature] " -labels: "enhancement" +title: "[FEATURE] " +labels: enhancement assignees: "" --- -## Feature Request +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -### Problem Statement +**Describe the solution you'd like** +A clear and concise description of what you want to happen. - +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. -### Proposed Solution +**Additional context** +Add any other context or screenshots about the feature request here. - - -### Alternative Solutions - - - -### Use Case - - - -### Additional Context - - - -### Would you be willing to help implement this feature? - - +**Would you be willing to contribute to this feature?** +Let us know if you'd be interested in helping implement this feature. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 27e03b3..dc99caf 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,38 +1,28 @@ -## Pull Request Description +## Description - +Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. -## Related Issues +Fixes # (issue) - +## Type of change -## Type of Change +Please delete options that are not relevant. - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) -- [ ] This change requires a documentation update -- [ ] Refactoring (no functional changes) -- [ ] CI/CD or build process changes +- [ ] Documentation update -## Checklist +## How Has This Been Tested? -- [ ] I have read the [CONTRIBUTING](../blob/main/CONTRIBUTING.md) document -- [ ] My code follows the code style of this project -- [ ] I have added tests that prove my fix is effective or that my feature works -- [ ] New and existing unit tests pass locally with my changes -- [ ] I have updated the documentation accordingly -- [ ] My changes generate no new warnings -- [ ] Any dependent changes have been merged and published in downstream modules - -## Screenshots (if applicable) - - +Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. 
-## Testing Steps +## Checklist: - - -## Additional Information - - +- [ ] My code follows the style guidelines of this project +- [ ] I have performed a self-review of my own code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have tested my changes and they work as expected +- [ ] Any dependent changes have been merged and published in downstream modules diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml deleted file mode 100644 index 89651f4..0000000 --- a/.github/workflows/docker-build.yml +++ /dev/null @@ -1,209 +0,0 @@ -name: Docker Build and Publish - -on: - release: - types: [published] - push: - branches: - - main - - feature/go-migration - paths-ignore: - - "**.md" - - "docs/**" - - ".github/ISSUE_TEMPLATE/**" - pull_request: - branches: - - main - paths-ignore: - - "**.md" - - "docs/**" - - ".github/ISSUE_TEMPLATE/**" - workflow_dispatch: - -env: - REGISTRY_IMAGE: johandevl/export-trakt-4-letterboxd - GITHUB_REGISTRY: ghcr.io - GITHUB_IMAGE: ghcr.io/johandevl/export_trakt_4_letterboxd - -jobs: - build: - name: Build and push multi-platform Docker images - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - security-events: write - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Extract metadata for Docker - id: meta - uses: docker/metadata-action@v5 - with: - images: | - ${{ env.REGISTRY_IMAGE }} - ${{ env.GITHUB_IMAGE }} - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - type=sha,format=short - type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} - - - name: Log in to Docker Hub - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Log in to GitHub Container Registry - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - with: - registry: ${{ env.GITHUB_REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Set build date - id: build_date - run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT - - - name: Build and export Docker image - uses: docker/build-push-action@v6 - with: - context: . 
- file: ./Dockerfile - push: ${{ github.event_name != 'pull_request' }} - platforms: linux/amd64,linux/arm64,linux/arm/v7 - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha,scope=${{ github.workflow }}-${{ github.ref_name }} - cache-to: ${{ github.event_name != 'pull_request' && format('type=gha,mode=max,scope={0}-{1}', github.workflow, github.ref_name) || '' }} - build-args: | - VERSION=${{ steps.meta.outputs.version }} - COMMIT_SHA=${{ github.sha }} - BUILD_DATE=${{ steps.build_date.outputs.BUILD_DATE }} - - - name: Scan image for vulnerabilities - if: github.event_name != 'pull_request' - uses: aquasecurity/trivy-action@master - with: - image-ref: ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} - format: "sarif" - output: "trivy-results.sarif" - - - name: Upload Trivy scan results to GitHub Security tab - if: github.event_name != 'pull_request' - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: "trivy-results.sarif" - - test: - name: Test Docker image - needs: build - runs-on: ubuntu-latest - if: github.event_name != 'pull_request' - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Log in to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Extract Docker metadata - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.REGISTRY_IMAGE }} - tags: | - type=sha,format=short - - - name: Pull image for testing - run: docker pull ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} - - - name: Test Docker image - run: | - # Create test directories - mkdir -p ./test_config ./test_logs ./test_exports - - # Basic image test - check if it runs properly - docker run --rm \ - -v $(pwd)/test_config:/app/config \ - -v $(pwd)/test_logs:/app/logs \ - -v $(pwd)/test_exports:/app/exports \ - ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} --help - - echo "Docker image tests passed successfully" - - notify: - name: Notify on success or failure - needs: [build, test] - runs-on: ubuntu-latest - if: always() && github.event_name == 'release' - - steps: - - name: Check build result - id: check - run: | - if ${{ needs.build.result == 'success' && needs.test.result == 'success' }}; then - echo "status=success" >> $GITHUB_OUTPUT - else - echo "status=failure" >> $GITHUB_OUTPUT - fi - - - name: Create GitHub comment - Success - if: steps.check.outputs.status == 'success' - uses: actions/github-script@v7 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: `✅ Docker images for version ${{ github.ref_name }} have been successfully built and published to: - - - Docker Hub: \`johandevl/export-trakt-4-letterboxd:${{ github.ref_name }}\` - - GitHub Packages: \`ghcr.io/johandevl/export_trakt_4_letterboxd:${{ github.ref_name }}\` - - The images are available for the following platforms: - - linux/amd64 - - linux/arm64 - - linux/arm/v7 - - To use the image: - \`\`\`bash - docker pull johandevl/export-trakt-4-letterboxd:${{ github.ref_name }} - \`\`\` - ` - }) - - - name: Create GitHub comment - Failure - if: steps.check.outputs.status == 'failure' - uses: actions/github-script@v7 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - 
github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: `❌ Docker image build for version ${{ github.ref_name }} failed. Please check the [workflow run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details.` - }) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml new file mode 100644 index 0000000..ff3fead --- /dev/null +++ b/.github/workflows/docker-publish.yml @@ -0,0 +1,209 @@ +name: Docker + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: + # Watch for pushes to branches and tags + push: + branches: ["main", "develop"] + tags: ["v*.*.*"] + paths-ignore: + - "**/*.md" + - "docs/**" + - "LICENSE" + + # Watch for pull requests to main and develop branches + pull_request: + branches: ["main", "develop"] + paths-ignore: + - "**/*.md" + - "docs/**" + - "LICENSE" + + # Allow manual triggering + workflow_dispatch: + inputs: + push_to_dockerhub: + description: "Push to Docker Hub" + required: false + default: false + type: boolean + +env: + # Default registry is GitHub Container Registry + GITHUB_REGISTRY: ghcr.io + # github.repository as / + GITHUB_IMAGE_NAME: ${{ github.repository }} + # Docker Hub registry and image name + DOCKERHUB_REGISTRY: docker.io + DOCKERHUB_IMAGE_NAME: johandevl/export-trakt-4-letterboxd + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + # This is used to complete the identity challenge + # with sigstore/fulcio when running outside of PRs. + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # Install the cosign tool except on PR + # https://github.com/sigstore/cosign-installer + - name: Install cosign + if: github.event_name != 'pull_request' + uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 #v3.5.0 + with: + cosign-release: "v2.2.4" + + # Get version information for main branch + - name: Get version info + id: version + run: | + if [[ "${{ github.ref }}" == "refs/tags/v"* ]]; then + # For tag 'v1.2.3', VERSION becomes '1.2.3' + VERSION="${{ github.ref_name }}" + VERSION=${VERSION#v} + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "tag=${{ github.ref_name }}" >> $GITHUB_OUTPUT + elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then + # Get latest version tag + LATEST_TAG=$(git tag -l "v*" | grep -v "-" | sort -V | tail -n 1) + if [ -z "$LATEST_TAG" ]; then + LATEST_TAG="v1.0.0" + fi + # Store the version without the 'v' prefix + VERSION=${LATEST_TAG#v} + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "tag=latest" >> $GITHUB_OUTPUT + else + # For develop branch or others + echo "version=dev" >> $GITHUB_OUTPUT + echo "tag=develop" >> $GITHUB_OUTPUT + fi + + # Get build date in ISO 8601 format + BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") + echo "build_date=$BUILD_DATE" >> $GITHUB_OUTPUT + + # Get Git commit hash + VCS_REF=$(git rev-parse --short HEAD) + echo "vcs_ref=$VCS_REF" >> $GITHUB_OUTPUT + + # Set up BuildKit Docker container builder to be able to build + # multi-platform images and export cache + # https://github.com/docker/setup-buildx-action + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 + + # Login against GitHub Container Registry + - name: 
Log into GitHub Container Registry + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.GITHUB_REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Login against Docker Hub + - name: Log into Docker Hub + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.DOCKERHUB_REGISTRY }} + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 + with: + images: | + ${{ env.GITHUB_REGISTRY }}/${{ env.GITHUB_IMAGE_NAME }} + ${{ env.DOCKERHUB_REGISTRY }}/${{ env.DOCKERHUB_IMAGE_NAME }} + tags: | + # For PR builds, use PR-# + type=ref,event=pr,prefix=PR- + # For semver tags + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + # Also add the vX.Y.Z format + type=semver,pattern=v{{version}} + # For branch-based tagging + type=ref,event=branch + # Add 'latest' tag for main branch + type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }} + # Add 'develop' tag for develop branch + type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }} + # Add vX.Y.Z tag for version tags + type=raw,value=v${{ steps.version.outputs.version }},enable=${{ startsWith(github.ref, 'refs/tags/v') }} + # Add tags for all builds + type=raw,value=v${{ steps.version.outputs.version }},enable=${{ github.ref == 'refs/heads/main' }} + type=raw,value=dev,enable=${{ github.ref == 'refs/heads/develop' }} + + # Debug step to see metadata output + - name: Debug metadata + run: | + echo "Generated tags:" + echo "${{ steps.meta.outputs.tags }}" + echo "PR number: ${{ github.event.pull_request.number }}" + echo "Ref: ${{ github.ref }}" + + # Build and push Docker image with Buildx + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + with: + context: . 
+ push: true # Always push regardless of event type + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64,linux/arm/v7 + build-args: | + APP_VERSION=${{ steps.version.outputs.version }} + BUILD_DATE=${{ steps.version.outputs.build_date }} + VCS_REF=${{ steps.version.outputs.vcs_ref }} + # Add cache configuration + cache-from: type=gha + cache-to: type=gha,mode=max + + # Export Docker image for release + - name: Export Docker image digest + if: github.event_name != 'pull_request' + id: digest + run: | + echo "Publishing successful, no need to extract digest" + echo "digest=skipped" >> $GITHUB_OUTPUT + + # Sign the resulting Docker image digest for GitHub Container Registry + - name: Sign the published Docker image for GitHub + if: false # Temporarily disabled + env: + DIGEST: ${{ steps.digest.outputs.digest }} + run: | + echo "Skipping signing step to focus on successful publishing" + # for TAG in ${{ steps.meta.outputs.tags }}; do + # echo "Signing image: ${TAG}@${DIGEST}" + # cosign sign --yes ${TAG}@${DIGEST} || echo "Warning: Failed to sign ${TAG}" + # done + + # Create GitHub release for tags + - name: Create GitHub Release + if: startsWith(github.ref, 'refs/tags/v') + uses: softprops/action-gh-release@v1 + with: + name: Release ${{ github.ref_name }} + draft: false + prerelease: false + generate_release_notes: true diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml new file mode 100644 index 0000000..d10b574 --- /dev/null +++ b/.github/workflows/docker-test.yml @@ -0,0 +1,150 @@ +name: Docker Image Test + +on: + pull_request: + branches: ["main", "develop"] + paths-ignore: + - "**/*.md" + - "docs/**" + - "LICENSE" + push: + branches: ["develop"] + paths-ignore: + - "**/*.md" + - "docs/**" + - "LICENSE" + +jobs: + unit-tests: + name: Run Unit Tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install dependencies + run: | + sudo apt-get update + sudo apt-get install -y jq + # Install dependencies for building kcov + sudo apt-get install -y cmake g++ pkg-config libdw-dev binutils-dev libiberty-dev libcurl4-openssl-dev + # Clone, build and install kcov from source + git clone https://github.com/SimonKagstrom/kcov.git + cd kcov + mkdir build + cd build + cmake .. + make -j$(nproc) + sudo make install + cd ../.. + + - name: Run tests + run: | + ./tests/run_tests.sh + + - name: Generate coverage report + run: | + ./tests/run_tests.sh coverage + + - name: Upload coverage report + uses: actions/upload-artifact@v4 + with: + name: test-coverage + path: test-results/coverage + + docker-test: + name: Test Docker Image + runs-on: ubuntu-latest + needs: unit-tests + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image + uses: docker/build-push-action@v5 + with: + context: . + load: true + tags: trakt-export:test + cache-from: type=gha + cache-to: type=gha,mode=max + push: false + + - name: Verify Docker image + run: | + echo "🔍 Verifying Docker image structure and dependencies..." 
+ + # Check if the image was built successfully + docker image inspect trakt-export:test + + # Check if the required scripts exist and are executable + docker run --rm trakt-export:test ls -la /app/Export_Trakt_4_Letterboxd.sh /app/setup_trakt.sh /app/docker-entrypoint.sh + + # Check if lib directory exists and scripts are executable + docker run --rm trakt-export:test bash -c "find /app/lib -name '*.sh' -type f -executable || echo 'No executable scripts found in lib directory'" + + # Check if the required directories exist + docker run --rm trakt-export:test ls -la /app/config /app/logs /app/copy /app/backup /app/TEMP + + # Check if the entrypoint script works correctly + docker run --rm trakt-export:test bash -c "test -x /app/docker-entrypoint.sh && echo '✅ Entrypoint script is executable'" + + # Check if the required tools are installed + docker run --rm trakt-export:test bash -c "command -v jq && command -v curl && command -v sed && echo '✅ Required tools are installed'" + + # Create a test container with the image but override the entrypoint + echo "🚀 Creating test container..." + docker run -d --name test-container --entrypoint bash trakt-export:test -c "sleep 60" + + # Wait a moment to ensure the container is running + sleep 5 + + # Check if the container is running + docker ps | grep test-container || { echo "❌ Container failed to start"; exit 1; } + + # Check if the configuration example file can be created manually + docker exec test-container bash -c "mkdir -p /app/config" + docker exec test-container bash -c "echo '# Test config' > /app/config/.config.cfg.example && echo '✅ Created test config file'" + + # Check if the cron setup code exists in the entrypoint script + docker exec test-container bash -c "grep -q 'CRON_SCHEDULE' /app/docker-entrypoint.sh && echo '✅ Cron setup code exists in entrypoint script'" + + # Clean up + docker stop test-container || true + docker rm test-container || true + + echo "✅ Docker image verification completed successfully" + + - name: Test Docker Compose + run: | + echo "🔍 Testing Docker Compose configuration..." + + # Create test environment variables + echo "CRON_SCHEDULE=* * * * *" > .env + echo "EXPORT_OPTION=normal" >> .env + + # Validate docker-compose.yml + docker compose config + + # Start the container with docker-compose + docker compose up -d + + # Check if the container is running + docker compose ps + + # Stop and remove the container + docker compose down + + echo "✅ Docker Compose test completed successfully" + + - name: Summary + run: | + echo "🎉 All Docker image tests passed successfully!" + echo "The Docker image is verified to be functional and ready for deployment." 
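The image checks in the docker-test workflow above can also be run locally before opening a pull request. This is a minimal sketch, assuming Docker with Compose v2 is installed and the commands are run from the repository root containing the Dockerfile and docker-compose.yml the workflow references; the trakt-export:test tag simply mirrors the tag used in the workflow.

```bash
# Build the image locally with the same tag the workflow uses.
docker build -t trakt-export:test .

# Re-run the same spot-checks the workflow performs on the built image.
docker run --rm trakt-export:test ls -la /app/Export_Trakt_4_Letterboxd.sh /app/setup_trakt.sh /app/docker-entrypoint.sh
docker run --rm trakt-export:test bash -c "command -v jq && command -v curl && command -v sed"

# Validate the Compose configuration with the same test environment variables.
printf 'CRON_SCHEDULE=* * * * *\nEXPORT_OPTION=normal\n' > .env
docker compose config
```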
diff --git a/.github/workflows/go-build.yml b/.github/workflows/go-build.yml deleted file mode 100644 index 6ff3edb..0000000 --- a/.github/workflows/go-build.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Go Build and Docker Publish - -on: - push: - branches: [main, develop] - tags: ["v*"] - pull_request: - branches: [main, develop] - -jobs: - build: - name: Build Go App - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v4 - with: - go-version: "1.21" - cache: true - - - name: Install dependencies - run: go mod download - - - name: Build - run: | - mkdir -p build - go build -v -o build/export_trakt ./cmd/export_trakt - - - name: Upload build artifact - uses: actions/upload-artifact@v4 - with: - name: export-trakt-binary - path: build/export_trakt - - docker: - name: Build and Push Docker Image - runs-on: ubuntu-latest - needs: build - if: github.event_name != 'pull_request' - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Download build artifact - uses: actions/download-artifact@v4 - with: - name: export-trakt-binary - path: build - - - name: Make binary executable - run: chmod +x build/export_trakt - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Docker meta - id: meta - uses: docker/metadata-action@v5 - with: - images: ghcr.io/${{ github.repository_owner }}/export_trakt_4_letterboxd - tags: | - type=ref,event=branch - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - type=sha - latest - - - name: Build and push Docker image - uses: docker/build-push-action@v6 - with: - context: . - file: ./Dockerfile - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - platforms: linux/amd64,linux/arm64 diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml deleted file mode 100644 index 1b4771d..0000000 --- a/.github/workflows/go-tests.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Go Tests - -on: - push: - branches: [main, feature/*, develop] - pull_request: - branches: [main, develop] - -jobs: - test: - name: Run Go Tests - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v4 - with: - go-version: "1.21" - cache: true - - - name: Install dependencies - run: go mod download - - - name: Run tests - run: go test -v ./... - - - name: Check test coverage - run: | - # Run tests with coverage, excluding main package - go test -coverprofile=coverage.out ./pkg/... - COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | tr -d '%') - echo "Total coverage (excluding main package): $COVERAGE%" - if (( $(echo "$COVERAGE < 70" | bc -l) )); then - echo "Code coverage is below 70%. Please add more tests." 
- exit 1 - fi - - - name: Generate coverage report - run: go tool cover -html=coverage.out -o coverage.html - - - name: Upload coverage report - uses: actions/upload-artifact@v4 - with: - name: coverage-report - path: coverage.html diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8ef2a3f..5cbadf6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,5 +1,10 @@ name: Create Release +permissions: + contents: write + packages: write + actions: read + on: push: tags: diff --git a/.github/workflows/static.yml b/.github/workflows/static.yml new file mode 100644 index 0000000..51c87e6 --- /dev/null +++ b/.github/workflows/static.yml @@ -0,0 +1,43 @@ +# Simple workflow for deploying static content to GitHub Pages +name: Deploy static content to Pages + +on: + # Runs on pushes targeting the default branch + push: + branches: ["main"] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. +# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + # Single deploy job since we're just deploying + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Pages + uses: actions/configure-pages@v5 + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + # Upload entire repository + path: "." + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.gitignore b/.gitignore index 853e581..77766ab 100644 --- a/.gitignore +++ b/.gitignore @@ -24,9 +24,6 @@ creds.js # Private Files *.json !tests/mocks/*.json -!locales/*.json -!temp_locales/*.json -!locales/*/translation.json *.cfg *.csv *.csv.gz @@ -37,12 +34,6 @@ creds.js # Mac/OSX .DS_Store -.DS_Store? 
-._* -.Spotlight-V100 -.Trashes -ehthumbs.db -Thumbs.db # Cursor .cursor/ @@ -57,16 +48,6 @@ TEMP/ /backup/ backup/ -# Export files (keep directory structure but ignore content) -exports/* -!exports/.gitkeep -!exports/README.md - -# Log files (keep directory structure but ignore content) -logs/* -!logs/.gitkeep -!logs/README.md - # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -205,6 +186,7 @@ dmypy.json # Cython debug symbols cython_debug/ *.tar.gz +/logs test_run/ @@ -212,23 +194,3 @@ test_run/ test_export.sh test_real_data.sh test_prod/ - -# Config files with credentials -config/config.toml -# But keep the example file -!config/config.example.toml - -# Go specific ignores -export_trakt -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Go test files -*.test -*.out - -# Go workspace file -go.work diff --git a/.gitmodules b/.gitmodules index 774deee..9068175 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,15 @@ [submodule "wiki"] path = wiki url = https://github.com/JohanDevl/Export_Trakt_4_Letterboxd.wiki.git +[submodule "tests/bats"] + path = tests/bats + url = https://github.com/bats-core/bats-core.git +[submodule "tests/helpers/bats-support"] + path = tests/helpers/bats-support + url = https://github.com/bats-core/bats-support.git +[submodule "tests/helpers/bats-assert"] + path = tests/helpers/bats-assert + url = https://github.com/bats-core/bats-assert.git +[submodule "tests/helpers/bats-file"] + path = tests/helpers/bats-file + url = https://github.com/bats-core/bats-file.git diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 4434a68..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,140 +0,0 @@ -# Changelog - -All notable changes to the Export Trakt for Letterboxd project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
- -## [Unreleased] - -### Added - -- **New execution modes**: `--run` flag for immediate one-time execution and `--schedule` flag for cron-based scheduling -- Comprehensive cron schedule validation with helpful error messages and examples -- Built-in scheduler with detailed logging and status reporting -- Support for immediate execution mode for testing and CI/CD integration -- Enhanced command-line interface with new scheduling options -- Comprehensive test suite with unit and integration tests -- Internationalization (i18n) support with English and French translations -- GitHub Actions CI/CD pipeline for automated testing -- Automated release workflow for cross-platform binary generation -- New issue templates for bug reports, feature requests, and beta feedback -- Enhanced documentation including contributing guide, installation instructions, and configuration guide -- Detailed scheduling examples and best practices documentation - -### Changed - -- Improved command-line argument handling with support for multiple execution modes -- Enhanced logging with scheduler-specific messages and status updates -- Better error handling for invalid cron expressions with user-friendly feedback - -## [2.0.0] - TBD - -### Added - -- Complete rewrite in Go for improved performance and maintainability -- Structured configuration using TOML format -- Comprehensive logging system with support for different log levels -- Advanced error handling with descriptive error messages -- Internationalization (i18n) support -- Multiple export formats (watched movies, watchlist, collections) -- Command-line interface with various options and flags -- Rate limiting for API requests to prevent exceeding Trakt.tv limits -- Retry mechanism for handling transient API failures -- Progress indication during exports -- Enhanced movie matching using TMDb IDs -- Support for advanced filtering (by rating, date range) -- Better handling of rewatched movies -- Cross-platform compatibility (Linux, macOS, Windows, ARM) -- Docker support with multi-arch images - -### Changed - -- Improved configuration handling with support for environment variables -- Enhanced Trakt.tv API client with better error handling -- More efficient data processing for large movie collections -- Better date handling with proper timezone support -- Improved CSV generation with proper escaping and formatting -- More reliable authentication flow with token refresh - -### Removed - -- Dependency on external tools (jq, curl) -- Temporary file usage for data processing - -## [1.5.0] - 2023-07-15 - -### Added - -- Docker support with multi-arch images -- GitHub Actions workflows for Docker builds -- Option to include collection items in export - -### Changed - -- Improved error handling and reporting -- Better support for special characters in movie titles -- Enhanced matching algorithm for movies - -## [1.4.0] - 2023-05-20 - -### Added - -- Backup functionality for API responses -- Support for exporting TV shows -- Optional logging to file -- Better date handling options - -### Changed - -- Improved authentication flow -- Enhanced API request handling - -## [1.3.0] - 2023-03-10 - -### Added - -- Support for filtering by minimum rating -- Option to include year in movie titles -- Enhanced watchlist export - -### Changed - -- Improved CSV formatting -- Better error messages - -## [1.2.0] - 2023-01-25 - -### Added - -- Export modes: normal, initial, complete -- Support for watched history with dates -- Automatic detection of rewatched movies - -### Changed - -- 
Improved Trakt.tv API integration -- Better configuration handling - -## [1.1.0] - 2022-11-12 - -### Added - -- TMDB integration for better movie matching -- Support for exporting ratings -- Configuration file for customization - -### Changed - -- Enhanced authentication mechanism -- Improved export format - -## [1.0.0] - 2022-09-01 - -### Added - -- Initial release -- Basic functionality to export Trakt.tv data -- Support for exporting to Letterboxd CSV format -- Simple authentication with Trakt.tv API -- Basic configuration options diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index 7f36aaa..0000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,128 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. - -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -- Demonstrating empathy and kindness toward other people -- Being respectful of differing opinions, viewpoints, and experiences -- Giving and gracefully accepting constructive feedback -- Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -- Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -- The use of sexualized language or imagery, and sexual attention or - advances of any kind -- Trolling, insulting or derogatory comments, and personal or political attacks -- Public or private harassment -- Publishing others' private information, such as a physical or email - address, without their explicit permission -- Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Project maintainers are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the project maintainers responsible for enforcement at -the project's issue tracker. -All complaints will be reviewed and investigated promptly and fairly. 
- -All project maintainers are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Project maintainers will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from project maintainers, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see the FAQ at -https://www.contributor-covenant.org/faq. Translations are available at -https://www.contributor-covenant.org/translations. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8f2b17f..2fdb8d8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,137 +1,109 @@ -# Contributing to Export Trakt for Letterboxd - -Thank you for your interest in contributing to Export Trakt for Letterboxd! This document provides guidelines and instructions for contributing to this project. 
- -## Table of Contents - -- [Code of Conduct](#code-of-conduct) -- [Getting Started](#getting-started) - - [Development Setup](#development-setup) - - [Project Structure](#project-structure) -- [How to Contribute](#how-to-contribute) - - [Reporting Bugs](#reporting-bugs) - - [Suggesting Enhancements](#suggesting-enhancements) - - [Pull Requests](#pull-requests) -- [Development Guidelines](#development-guidelines) - - [Coding Standards](#coding-standards) - - [Testing](#testing) - - [Documentation](#documentation) -- [Release Process](#release-process) -- [License](#license) +# Contributing to Export Trakt 4 Letterboxd + +Thank you for your interest in contributing to Export Trakt 4 Letterboxd! This document provides guidelines and instructions for contributing to this project. ## Code of Conduct -Please read and follow our [Code of Conduct](CODE_OF_CONDUCT.md) to foster an open and welcoming environment. +By participating in this project, you agree to abide by our code of conduct. Please be respectful and considerate of others. -## Getting Started +## How to Contribute -### Development Setup +### Reporting Bugs -1. **Fork the repository** +If you find a bug, please create an issue using the bug report template. Be sure to include: -2. **Clone your fork** +- A clear description of the bug +- Steps to reproduce the issue +- Expected behavior +- Screenshots if applicable +- Your environment details - ```bash - git clone https://github.com/YOUR-USERNAME/Export_Trakt_4_Letterboxd.git - cd Export_Trakt_4_Letterboxd - ``` +### Suggesting Enhancements -3. **Set up the development environment** +If you have an idea for an enhancement, please create an issue using the feature request template. Be sure to include: - ```bash - # Install Go (if not already installed) - # macOS (using Homebrew): - brew install go +- A clear description of the feature +- The motivation for the feature +- Any alternative solutions you've considered - # Ubuntu/Debian: - sudo apt-get update - sudo apt-get install golang +### Pull Requests - # Windows: - # Download from https://golang.org/dl/ +1. Fork the repository +2. Create a new branch for your feature or bugfix +3. Make your changes +4. Test your changes +5. Submit a pull request - # Install dependencies - go mod download - ``` +Please follow these guidelines for your pull requests: -4. **Create a branch for your work** - ```bash - git checkout -b feature/your-feature-name - ``` +- Follow the coding style of the project +- Write clear commit messages +- Include tests for your changes +- Update documentation as needed +- Reference any related issues -### Project Structure - -``` -Export_Trakt_4_Letterboxd/ -├── cmd/ # Command-line applications -│ └── export_trakt/ # Main application entry point -├── pkg/ # Reusable packages -│ ├── api/ # API client for Trakt.tv -│ ├── config/ # Configuration handling -│ ├── export/ # Export functionality -│ ├── i18n/ # Internationalization -│ └── logger/ # Logging facilities -├── tests/ # Test suites -│ ├── integration/ # Integration tests -│ └── mocks/ # Mock objects for testing -├── docs/ # Documentation -├── locales/ # Translation files -└── .github/ # GitHub specific files -``` +## Development Setup -## How to Contribute +### Prerequisites -### Reporting Bugs +- A Trakt.tv account +- A Trakt.tv application (Client ID and Client Secret) +- `jq` and `curl` installed on your system -1. Check if the bug has already been reported in the [Issues](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/issues) -2. 
If not, create a new issue using the Bug Report template -3. Provide detailed steps to reproduce the bug -4. Include relevant information about your environment +### Local Setup -### Suggesting Enhancements +1. Clone the repository: -1. Check if the enhancement has already been suggested in the [Issues](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/issues) -2. If not, create a new issue using the Feature Request template -3. Describe the enhancement in detail and why it would be valuable + ```bash + git clone https://github.com/YOUR-USERNAME/Export_Trakt_4_Letterboxd.git + cd Export_Trakt_4_Letterboxd + ``` -### Pull Requests +2. Make the scripts executable: + + ```bash + chmod +x Export_Trakt_4_Letterboxd.sh setup_trakt.sh + ``` + +3. Configure Trakt authentication: + ```bash + ./setup_trakt.sh + ``` + +### Docker Setup -1. Update your fork to the latest main branch -2. Create a new branch for your changes -3. Make your changes following the [Development Guidelines](#development-guidelines) -4. Add or update tests as needed -5. Ensure all tests pass -6. Update documentation as required -7. Submit your pull request with a clear description of the changes +1. Clone the repository: -## Development Guidelines + ```bash + git clone https://github.com/YOUR-USERNAME/Export_Trakt_4_Letterboxd.git + cd Export_Trakt_4_Letterboxd + ``` -### Coding Standards +2. Start the container: -- Follow Go best practices and style guidelines (use `gofmt` or `goimports`) -- Use meaningful variable and function names -- Keep functions small and focused on a single responsibility -- Write clear comments for complex logic -- Document exported functions and types + ```bash + docker compose up -d + ``` -### Testing +3. Configure Trakt authentication: + ```bash + docker compose exec trakt-export ./setup_trakt.sh + ``` -- Write unit tests for new functionality -- Ensure existing tests pass with your changes -- Use integration tests for API interactions -- Aim for high test coverage, especially for critical code paths +## Testing -### Documentation +Before submitting a pull request, please test your changes thoroughly. This includes: -- Update code documentation for public APIs -- Update README.md when adding new features -- Document configuration options -- Consider adding examples for complex features +- Testing the main functionality +- Testing edge cases +- Ensuring Docker compatibility if applicable -## Release Process +## Documentation -The project follows [Semantic Versioning](https://semver.org/). For more details on the release process, see the [Release Plan](docs/RELEASE_PLAN.md). +If you're changing functionality, please update the relevant documentation in the `wiki/` directory. ## License -By contributing, you agree that your contributions will be licensed under the project's license. See the [LICENSE](LICENSE) file for details. +By contributing to this project, you agree that your contributions will be licensed under the same license as the project. + +Thank you for contributing to Export Trakt 4 Letterboxd! diff --git a/DOCKER_CHANGES.md b/DOCKER_CHANGES.md deleted file mode 100644 index d32d96f..0000000 --- a/DOCKER_CHANGES.md +++ /dev/null @@ -1,167 +0,0 @@ -# Docker Compose Changes - New Scheduling Features - -This document summarizes the changes made to the Docker Compose configuration to support the new `--run` and `--schedule` functionality. - -## Overview of Changes - -The Docker Compose configuration has been significantly enhanced to support three execution modes: - -1. 
**Immediate Execution (`--run`)**: Execute once and exit -2. **Scheduled Execution (`--schedule`)**: Run on a cron schedule -3. **Legacy Mode**: Traditional command-based approach (for backward compatibility) - -## New Services Added - -### Immediate Execution Services (`--run`) - -| Service Name | Profile | Command | Purpose | -| ----------------------------- | ---------------- | ----------------------------------------- | ----------------------------- | -| `export-trakt-run-watched` | `run-watched` | `--run --export watched --mode normal` | Export watched movies only | -| `export-trakt-run-all` | `run-all` | `--run --export all --mode complete` | Export all data (recommended) | -| `export-trakt-run-collection` | `run-collection` | `--run --export collection --mode normal` | Export collection only | -| `export-trakt-run-ratings` | `run-ratings` | `--run --export ratings --mode complete` | Export ratings only | -| `export-trakt-run-watchlist` | `run-watchlist` | `--run --export watchlist --mode normal` | Export watchlist only | -| `export-trakt-run-shows` | `run-shows` | `--run --export shows --mode complete` | Export shows only | - -### Scheduled Execution Services (`--schedule`) - -| Service Name | Profile | Schedule | Command | Purpose | -| ------------------------------ | ----------------- | ------------------ | ---------------------------------------------------------- | ---------------------- | -| `export-trakt-schedule-6h` | `schedule-6h` | Every 6 hours | `--schedule "0 */6 * * *" --export all --mode complete` | Production scheduler | -| `export-trakt-schedule-daily` | `schedule-daily` | Daily at 2:30 AM | `--schedule "30 2 * * *" --export all --mode complete` | Daily backup | -| `export-trakt-schedule-weekly` | `schedule-weekly` | Sundays at 3:00 AM | `--schedule "0 3 * * 0" --export all --mode complete` | Weekly backup | -| `export-trakt-schedule-15min` | `schedule-15min` | Every 15 minutes | `--schedule "*/15 * * * *" --export watched --mode normal` | High-frequency testing | -| `export-trakt-schedule-custom` | `schedule-custom` | Configurable | Custom via env vars | Custom schedule | - -## Profile Organization - -### New Profiles - -- **Immediate Execution**: `run`, `run-watched`, `run-all`, `run-collection`, `run-ratings`, `run-watchlist`, `run-shows` -- **Scheduled Execution**: `schedule`, `schedule-6h`, `schedule-daily`, `schedule-weekly`, `schedule-15min`, `schedule-test`, `schedule-custom` -- **Legacy Compatibility**: `legacy`, `legacy-scheduled` - -### Updated Profiles - -- **Default/Export**: Maintained for backward compatibility, now also tagged as `legacy` -- **Complete/Initial**: Now also tagged as `legacy` -- **Scheduled**: Now tagged as `legacy-scheduled` - -## Configuration Changes - -### Removed Obsolete Version - -```yaml -# REMOVED -version: "3.8" -``` - -The `version` attribute is no longer needed in modern Docker Compose. 
- -### Enhanced Custom Scheduler - -The `schedule-custom` service now properly handles environment variables: - -```yaml -export-trakt-schedule-custom: - <<: *export-trakt-base - profiles: ["schedule-custom"] - container_name: export-trakt-schedule-custom - restart: unless-stopped - environment: - - TZ=UTC - - CUSTOM_SCHEDULE=${SCHEDULE:-0 */6 * * *} - - CUSTOM_EXPORT_TYPE=${EXPORT_TYPE:-all} - - CUSTOM_EXPORT_MODE=${EXPORT_MODE:-complete} - entrypoint: ["/bin/sh", "-c"] - command: - - | - /app/export-trakt --schedule "$${CUSTOM_SCHEDULE}" --export "$${CUSTOM_EXPORT_TYPE}" --mode "$${CUSTOM_EXPORT_MODE}" -``` - -## Usage Examples - -### Quick Commands - -```bash -# Test configuration -docker compose --profile run-watched up - -# Export all data once -docker compose --profile run-all up - -# Start production scheduler -docker compose --profile schedule-6h up -d - -# Custom schedule -SCHEDULE="0 */4 * * *" EXPORT_TYPE="watched" EXPORT_MODE="normal" \ -docker compose --profile schedule-custom up -d -``` - -### Migration from Legacy - -| Old Command | New Equivalent | -| ------------------------------------------ | -------------------------------------------- | -| `docker compose up` | `docker compose --profile run-watched up` | -| `docker compose --profile complete up` | `docker compose --profile run-all up` | -| `docker compose --profile scheduled up -d` | `docker compose --profile schedule-6h up -d` | - -## Documentation Added - -### New Files - -1. **`docker/README.md`**: Comprehensive Docker usage guide -2. **`docker/test-docker.sh`**: Test script for Docker functionality -3. **`DOCKER_CHANGES.md`**: This file documenting changes - -### Updated Files - -1. **`docker-compose.yml`**: Complete restructure with new services -2. **Comment section**: Detailed usage examples in the compose file - -## Benefits - -### For Users - -- **Simplified Usage**: Clear profiles for different use cases -- **Flexible Scheduling**: Multiple pre-configured schedules -- **Better Testing**: Dedicated test profiles -- **Backward Compatibility**: Legacy services still work - -### For Development - -- **Modular Design**: Each service has a specific purpose -- **Easy Extension**: Adding new schedules is straightforward -- **Clear Separation**: Different modes are clearly separated -- **Maintainable**: Well-documented and organized - -## Testing - -The changes have been tested with: - -- ✅ Docker Compose syntax validation (`docker compose config`) -- ✅ Profile listing (`docker compose config --profiles`) -- ✅ Service validation with test script -- ✅ Environment variable handling -- ✅ Backward compatibility - -## Backward Compatibility - -All existing commands continue to work: - -- `docker compose up` (default export) -- `docker compose --profile setup up` (setup) -- `docker compose --profile complete up` (complete export) -- `docker compose --profile scheduled up -d` (legacy scheduler) - -## Future Enhancements - -The new structure makes it easy to add: - -- Additional schedule presets -- Different export configurations -- Health checks -- Monitoring integrations -- Resource limits - -This Docker Compose update provides a solid foundation for both current users and future feature development! 
diff --git a/Dockerfile b/Dockerfile index 2edcfbc..d72ccc9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,77 +1,76 @@ # Build stage -FROM golang:1.22-alpine AS builder +FROM alpine:3.19 as builder # Install build dependencies -RUN apk add --no-cache git gcc musl-dev - -# Set build arguments -ARG VERSION=dev -ARG COMMIT_SHA=unknown -ARG BUILD_DATE=unknown +RUN apk add --no-cache bash curl jq sed git # Set working directory -WORKDIR /app - -# Copy go module files -COPY go.mod go.sum ./ -RUN go mod download +WORKDIR /build -# Copy source code -COPY . . +# Copy only necessary build files first +COPY lib/ /build/lib/ +COPY locales/ /build/locales/ +COPY Export_Trakt_4_Letterboxd.sh setup_trakt.sh install.sh /build/ -# Build binary with version information -RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo \ - -ldflags "-s -w \ - -X github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/version.Version=${VERSION} \ - -X github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/version.CommitSHA=${COMMIT_SHA} \ - -X github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/version.BuildDate=${BUILD_DATE}" \ - -o export-trakt ./cmd/export_trakt +# Make scripts executable +RUN chmod +x /build/*.sh +RUN find /build/lib -name "*" -exec chmod +x {} \; -# Runtime stage +# Final stage FROM alpine:3.19 -# Install CA certificates for HTTPS -RUN apk add --no-cache ca-certificates tzdata - -# Create non-root user -RUN addgroup -S appgroup && adduser -S appuser -G appgroup +# Define build arguments for version +ARG APP_VERSION=dev +ARG BUILD_DATE +ARG VCS_REF + +# Add metadata labels using OCI standard +LABEL org.opencontainers.image.version=$APP_VERSION \ + org.opencontainers.image.created=$BUILD_DATE \ + org.opencontainers.image.revision=$VCS_REF \ + org.opencontainers.image.title="Export Trakt 4 Letterboxd" \ + org.opencontainers.image.description="Tool for exporting Trakt.tv history to Letterboxd compatible format" \ + org.opencontainers.image.url="https://github.com/JohanDevl/Export_Trakt_4_Letterboxd" \ + org.opencontainers.image.documentation="https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/blob/main/README.md" \ + maintainer="JohanDevl" + +# Install required runtime packages (minimal set) +RUN apk add --no-cache bash curl jq sed ca-certificates tzdata dcron \ + && addgroup -S appgroup && adduser -S appuser -G appgroup # Set working directory WORKDIR /app -# Create directories and set permissions -RUN mkdir -p /app/config /app/logs /app/exports \ - && chown -R appuser:appgroup /app - -# Copy binary from builder stage -COPY --from=builder /app/export-trakt /app/export-trakt +# Copy application files from builder +COPY --from=builder /build /app/ +COPY docker-entrypoint.sh /app/ -# Copy locales -COPY --from=builder /app/locales /app/locales +# Create necessary directories with proper permissions +RUN mkdir -p /app/backup /app/logs /app/copy /app/TEMP /app/config \ + && chmod +x /app/*.sh \ + && chmod -R 755 /app/lib \ + && chown -R appuser:appgroup /app/backup /app/logs /app/copy /app/TEMP /app/config \ + && chmod -R 777 /app/TEMP \ + && mkdir -p /var/spool/cron/crontabs \ + && touch /var/spool/cron/crontabs/root \ + && chmod 600 /var/spool/cron/crontabs/root # Set environment variables -ENV EXPORT_TRAKT_EXPORT_OUTPUT_DIR=/app/exports -ENV EXPORT_TRAKT_LOGGING_FILE=/app/logs/export.log - -# Switch to non-root user -USER appuser - -# Create volumes for persistent data -VOLUME ["/app/config", "/app/logs", "/app/exports"] +ENV DOSLOG=/app/logs \ + DOSCOPY=/app/copy \ + BACKUP_DIR=/app/backup \ + 
CONFIG_DIR=/app/config \ + CRON_SCHEDULE="" \ + EXPORT_OPTION="normal" \ + APP_VERSION=$APP_VERSION \ + TZ=UTC + +# Set volume for persistent data +VOLUME ["/app/logs", "/app/copy", "/app/backup", "/app/config"] + +# Health check +HEALTHCHECK --interval=1m --timeout=10s --start-period=30s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 # Set entrypoint -ENTRYPOINT ["/app/export-trakt"] - -# Default command if none is provided -CMD ["--help"] - -# Metadata -LABEL org.opencontainers.image.title="Export Trakt for Letterboxd" -LABEL org.opencontainers.image.description="Tool to export Trakt.tv data for Letterboxd import" -LABEL org.opencontainers.image.authors="JohanDevl" -LABEL org.opencontainers.image.url="https://github.com/JohanDevl/Export_Trakt_4_Letterboxd" -LABEL org.opencontainers.image.source="https://github.com/JohanDevl/Export_Trakt_4_Letterboxd" -LABEL org.opencontainers.image.version="${VERSION}" -LABEL org.opencontainers.image.created="${BUILD_DATE}" -LABEL org.opencontainers.image.revision="${COMMIT_SHA}" -LABEL org.opencontainers.image.licenses="MIT" \ No newline at end of file +ENTRYPOINT ["/app/docker-entrypoint.sh"] \ No newline at end of file diff --git a/Export_Trakt_4_Letterboxd.sh b/Export_Trakt_4_Letterboxd.sh new file mode 100755 index 0000000..6f54859 --- /dev/null +++ b/Export_Trakt_4_Letterboxd.sh @@ -0,0 +1,77 @@ +#!/bin/bash +# +# Export_Trakt_4_Letterboxd - Main Script +# This script exports your Trakt.tv watch history to a CSV format compatible with Letterboxd import. +# Author: Johan +# + +# Get script directory (resolving symlinks) +SCRIPT_DIR="$( cd "$( dirname "$(readlink -f "${BASH_SOURCE[0]}" 2>/dev/null || echo "${BASH_SOURCE[0]}")" )" && pwd )" + +# Color codes +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +CYAN='\033[0;36m' +BLUE='\033[0;34m' +PURPLE='\033[0;35m' +SAISPAS='\033[0;36m' +BOLD='\033[1m' +NC='\033[0m' + +# Initialize logging +CONFIG_DIR="${SCRIPT_DIR}/config" +LOG_TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S") +LOG="${SCRIPT_DIR}/logs/Export_Trakt_4_Letterboxd_${LOG_TIMESTAMP}.log" + +# Create logs directory if it doesn't exist +mkdir -p "${SCRIPT_DIR}/logs" + +# Source the main module +if [ -f "${SCRIPT_DIR}/lib/main.sh" ]; then + source "${SCRIPT_DIR}/lib/main.sh" +else + echo -e "${RED}ERROR: Main module not found. 
Did you run the setup script?${NC}" | tee -a "${LOG}" + exit 1 +fi + +# Import modules +import_modules "$SCRIPT_DIR" + +# Default global variables +TEMP_DIR="${SCRIPT_DIR}/TEMP" +DOSLOG="${SCRIPT_DIR}/logs" +DOSCOPY="${SCRIPT_DIR}/copy" +BACKUP_DIR="${SCRIPT_DIR}/backup" + +# Load configuration (needed for language settings) +[ -f "${CONFIG_DIR}/.config.cfg" ] && source "${CONFIG_DIR}/.config.cfg" + +# Initialize internationalization - use language from config or auto-detect +init_i18n "$SCRIPT_DIR" "$LOG" + +# Log header +echo "===============================================================" | tee -a "${LOG}" +echo -e "${GREEN}$(_ "welcome") - $(_ "starting")${NC}" | tee -a "${LOG}" +echo "===============================================================" | tee -a "${LOG}" +echo -e "${BLUE}$(date) - $(_ "script_execution_start")${NC}" | tee -a "${LOG}" + +# Example of using internationalization +echo -e "${CYAN}$(_ "WELCOME") - $(_ "running_in") $(hostname)${NC}" | tee -a "${LOG}" +echo -e "${YELLOW}$(_ "language_set"): ${LANGUAGE:-$(_ "auto_detected")}${NC}" | tee -a "${LOG}" + +# Check if we are running in Docker +if [ -f "/.dockerenv" ]; then + echo -e "${CYAN}$(_ "running_docker")${NC}" | tee -a "${LOG}" + # Docker-specific settings can be added here +fi + +# Parse command line argument (if any) +OPTION="$1" +echo -e "${YELLOW}$(_ "script_option"): ${OPTION:-$(_ "none")}${NC}" | tee -a "${LOG}" + +# Run the export process +run_export "$SCRIPT_DIR" "$OPTION" + +# Exit with success +exit 0 diff --git a/README.md b/README.md index 55b1cda..6f3ad14 100755 --- a/README.md +++ b/README.md @@ -11,28 +11,18 @@ [![Docker Image Size](https://img.shields.io/docker/image-size/johandevl/export-trakt-4-letterboxd/latest?logo=docker&label=Image%20Size)](https://hub.docker.com/r/johandevl/export-trakt-4-letterboxd) [![Docker Pulls](https://img.shields.io/docker/pulls/johandevl/export-trakt-4-letterboxd?logo=docker&label=Pulls)](https://hub.docker.com/r/johandevl/export-trakt-4-letterboxd) [![Platforms](https://img.shields.io/badge/platforms-amd64%20|%20arm64%20|%20armv7-lightgrey?logo=docker)](https://hub.docker.com/r/johandevl/export-trakt-4-letterboxd/tags) -[![Code Coverage](https://img.shields.io/badge/coverage-78%25-brightgreen)](coverage.html) [![Trakt.tv](https://img.shields.io/badge/Trakt.tv-ED1C24?logo=trakt&logoColor=white)](https://trakt.tv) [![Letterboxd](https://img.shields.io/badge/Letterboxd-00D735?logo=letterboxd&logoColor=white)](https://letterboxd.com) This project allows you to export your Trakt.tv data to a format compatible with Letterboxd. 
-## 🚀 Go Implementation 🚀 - -This application is now built entirely in Go, providing: - -- Modern, modular Go architecture with clean separation of concerns -- Improved error handling and logging with multiple levels -- Internationalization (i18n) support for multiple languages -- Robust test coverage (over 80% across all packages) -- Enhanced Trakt.tv API client with retry mechanism and rate limiting - ## Quick Start ### Prerequisites - A Trakt.tv account - A Trakt.tv application (Client ID and Client Secret) +- jq and curl (for local installation) - Docker (for containerized installation) ### Using Docker (Recommended) @@ -57,7 +47,8 @@ This application is now built entirely in Go, providing: docker run -it --name trakt-export \ -v $(pwd)/config:/app/config \ -v $(pwd)/logs:/app/logs \ - -v $(pwd)/exports:/app/exports \ + -v $(pwd)/copy:/app/copy \ + -v $(pwd)/backup:/app/backup \ johandevl/export-trakt-4-letterboxd:latest ``` @@ -67,7 +58,9 @@ This application is now built entirely in Go, providing: docker compose --profile scheduled up -d ``` -### Local Installation (From Source) +See [Docker Usage Guide](docs/DOCKER_USAGE.md) for more details. + +### Local Installation 1. Clone the repository: @@ -76,16 +69,23 @@ This application is now built entirely in Go, providing: cd Export_Trakt_4_Letterboxd ``` -2. Build the Go application: +2. Run the installation script: + + ```bash + ./install.sh + ``` + +3. Configure Trakt authentication: ```bash - go build -o export_trakt ./cmd/export_trakt/ + ./setup_trakt.sh ``` -3. Run the application: +4. Export your data: ```bash - ./export_trakt --config ./config/config.toml + ./Export_Trakt_4_Letterboxd.sh [option] ``` + Options: `normal` (default), `initial`, or `complete` ## Features @@ -93,78 +93,37 @@ This application is now built entirely in Go, providing: - Export watch history with dates and ratings - Export watchlist items - Automatic detection of rewatched movies -- Supports various export modes +- Supports various export modes (normal, initial, complete) - Modular code structure for better maintainability -- Automated exports with scheduling +- Automated exports with cron - Docker support -- Complete Go implementation with improved performance and reliability - -## Scheduling and Automation - -The application supports scheduled exports using cron-like expressions through the `EXPORT_SCHEDULE` environment variable. - -### Cron Scheduling - -When running in `schedule` mode, the application will use the `EXPORT_SCHEDULE` environment variable to determine when to run exports: - -```bash -# Run the scheduler with a specific schedule (every 5 minutes) -EXPORT_SCHEDULE="*/5 * * * *" EXPORT_MODE="complete" EXPORT_TYPE="all" ./export_trakt schedule -``` - -### Using Docker Compose - -The Docker Compose file includes a pre-configured scheduled service: - -```bash -# Run the scheduler in Docker -docker compose --profile scheduled up -d -``` - -This will start a container that runs exports according to the schedule defined in the `EXPORT_SCHEDULE` environment variable in the docker-compose.yml file. 
- -### Customizing the Schedule - -You can customize the schedule by modifying the `EXPORT_SCHEDULE` variable in the docker-compose.yml file: - -```yaml -environment: - - EXPORT_SCHEDULE=0 4 * * * # Run daily at 4 AM - - EXPORT_MODE=complete - - EXPORT_TYPE=all -``` - -Common cron schedule examples: - -- `*/5 * * * *`: Every 5 minutes -- `0 * * * *`: Every hour -- `0 4 * * *`: Every day at 4 AM -- `0 4 * * 0`: Every Sunday at 4 AM -- `0 4 1 * *`: On the 1st day of each month at 4 AM ## Project Structure -The Go implementation follows a modern application structure: +The codebase has been modularized for better maintenance and readability: ``` Export_Trakt_4_Letterboxd/ -├── cmd/ # Application entry points -│ └── export_trakt/ # Main executable -├── internal/ # Private application code -│ ├── models/ # Data models -│ └── utils/ # Private utilities -├── pkg/ # Packages for core functionality -│ ├── api/ # Trakt.tv API client -│ ├── config/ # Configuration management -│ ├── export/ # Export functionality -│ ├── i18n/ # Internationalization support -│ └── logger/ # Logging system -├── locales/ # Translation files -│ ├── en.json # English translations -│ └── fr.json # French translations +├── lib/ # Library modules +│ ├── config.sh # Configuration management +│ ├── utils.sh # Utility functions and debugging +│ ├── trakt_api.sh # API interaction functions +│ ├── data_processing.sh # Data transformation functions +│ └── main.sh # Main orchestration module ├── config/ # Configuration files -├── build/ # Compiled binaries -└── logs/ # Log output +├── logs/ # Log output +├── backup/ # Backup of API responses +├── TEMP/ # Temporary processing files +├── copy/ # Output CSV files +├── tests/ # Automated tests +│ ├── unit/ # Unit tests for library modules +│ ├── integration/ # Integration tests +│ ├── mocks/ # Mock API responses +│ ├── run_tests.sh # Test runner script +│ └── test_helper.bash # Test helper functions +├── Export_Trakt_4_Letterboxd.sh # Main script (simplified) +├── setup_trakt.sh # Authentication setup +└── install.sh # Installation script ``` ## Testing @@ -173,59 +132,46 @@ The project includes comprehensive automated tests to ensure code quality and pr ### Running Tests -To run the tests, you need to have Go installed. +To run the tests, you need to have the following dependencies installed: -Run all tests: - -```bash -go test -v ./... -``` +- jq +- bats-core (installed as Git submodule) -Generate a coverage report: +Run all tests: ```bash -go test -coverprofile=coverage.out ./... -go tool cover -html=coverage.out -o coverage.html +./tests/run_tests.sh ``` -Run the coverage script (excludes main package): +Generate a coverage report (requires kcov): ```bash -./scripts/coverage.sh +./tests/run_tests.sh coverage ``` -The project maintains over 78% test coverage across the core packages, which helps ensure reliability and stability. The coverage includes: - -- API Client: 73.3% covered -- Config Management: 85.4% covered -- Export Functionality: 78.3% covered -- Internationalization: 81.6% covered -- Logging System: 97.7% covered +### Testing Framework -### Code Coverage Configuration +The testing framework uses: -The project includes a `.codecov.yml` file that configures code coverage analysis for CI/CD pipelines. 
This configuration: +- Bats (Bash Automated Testing System) for running tests +- Mock API responses to test without real API calls +- Integration tests to verify the complete export process +- Unit tests for core library functions -- Sets a 70% coverage threshold for the project -- Excludes the `cmd/export_trakt` directory (main package) from coverage calculations -- Provides detailed coverage reports for each pull request +### Continuous Integration -If you're using GitHub Actions or another CI system, this configuration ensures accurate coverage reporting focused on the core packages rather than the main application entry point. +Tests are automatically run in the CI/CD pipeline for every pull request to ensure code quality before merging. ## Documentation -Complete documentation is available in the [project Wiki](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki). +For more detailed information, please refer to the documentation in the `docs` folder: -You will find: - -- [Installation Guide](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/Installation) -- [CLI Reference](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/CLI-Reference) -- [Export Features](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/Export-Features) -- [Trakt API Guide](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/Trakt-API-Guide) -- [Internationalization](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/Internationalization) -- [Migration Guide](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/Migration-Guide) -- [Testing](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/Testing) -- [CI/CD](https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/wiki/CI-CD) +- [Configuration and Basic Usage](docs/CONFIGURATION.md) +- [Docker Usage Guide](docs/DOCKER_USAGE.md) +- [Docker Testing](docs/DOCKER_TESTING.md) +- [GitHub Actions](docs/GITHUB_ACTIONS.md) +- [Automatic Version Tagging](docs/AUTO_TAGGING.md) +- [Testing Framework](docs/TESTING.md) ## Troubleshooting @@ -233,7 +179,7 @@ If you encounter issues: 1. Check that your Trakt.tv profile is public 2. Verify your authentication configuration -3. Ensure your config.toml file has the correct Trakt.tv client ID and secret +3. Run `./setup_trakt.sh` again to refresh your tokens 4. Check logs in the `logs` directory for detailed error information ## Acknowledgements @@ -255,38 +201,3 @@ The original work by u2pitchjami is also licensed under the MIT License. This fo - Twitter: [@0xUta](https://twitter.com/0xUta) - Github: [@JohanDevl](https://github.com/JohanDevl) - LinkedIn: [@johan-devlaminck](https://linkedin.com/in/johan-devlaminck) - -## Letterboxd Import Export Format - -A new export format has been added to generate files compatible with Letterboxd's import functionality. To use this feature: - -1. Set `extended_info = "letterboxd"` in your `config.toml` file -2. 
Run the application normally or with Docker (see below) - -The format includes the following fields: - -- Title: Movie title (quoted) -- Year: Release year -- imdbID: IMDB ID for the movie -- tmdbID: TMDB ID for the movie -- WatchedDate: Date the movie was watched -- Rating10: Rating on a scale of 1-10 -- Rewatch: Whether the movie has been watched multiple times (true/false) - -### Using with Docker - -To use the Letterboxd export format with Docker: - -```bash -# Create directories for the Docker volumes -mkdir -p config logs exports - -# Copy the example config file and edit it -cp config.example.toml config/config.toml - -# Edit the config file to set extended_info = "letterboxd" -# Then run: -docker run --rm -v $(pwd)/config:/app/config -v $(pwd)/logs:/app/logs -v $(pwd)/exports:/app/exports johandevl/export-trakt-4-letterboxd:latest -``` - -The output file will be saved as `letterboxd_import.csv` in your exports directory. diff --git a/build-docker.sh b/build-docker.sh new file mode 100755 index 0000000..ea8c036 --- /dev/null +++ b/build-docker.sh @@ -0,0 +1,186 @@ +#!/bin/bash +# Script for building multi-architecture Docker images for Export_Trakt_4_Letterboxd + +set -e + +# Default settings +REGISTRY="docker.io" +REPOSITORY="johandevl/export-trakt-4-letterboxd" +DEFAULT_TAG="latest" +PLATFORMS="linux/amd64,linux/arm64,linux/arm/v7" + +# Usage info +show_help() { + echo "Usage: $0 [OPTIONS]" + echo "Build and push multi-architecture Docker images for Export_Trakt_4_Letterboxd" + echo "" + echo "Options:" + echo " -h, --help Show this help message" + echo " -t, --tag TAG Specify Docker image tag (default: $DEFAULT_TAG)" + echo " -v, --version VERSION Specify application version (default: derived from tag)" + echo " -p, --platforms PLATFORMS Specify platforms to build for (default: $PLATFORMS)" + echo " -n, --no-push Build but don't push images" + echo " -l, --local Build for local platform only" + echo " --dry-run Show commands without executing" + echo "" + echo "Example:" + echo " $0 --tag v1.0.0 --version 1.0.0" +} + +# Parse arguments +TAG=$DEFAULT_TAG +VERSION="" +NO_PUSH=false +LOCAL_ONLY=false +DRY_RUN=false + +while [[ $# -gt 0 ]]; do + key="$1" + case $key in + -h|--help) + show_help + exit 0 + ;; + -t|--tag) + TAG="$2" + shift + shift + ;; + -v|--version) + VERSION="$2" + shift + shift + ;; + -p|--platforms) + PLATFORMS="$2" + shift + shift + ;; + -n|--no-push) + NO_PUSH=true + shift + ;; + -l|--local) + LOCAL_ONLY=true + PLATFORMS="" + shift + ;; + --dry-run) + DRY_RUN=true + shift + ;; + *) + echo "Unknown option: $1" + show_help + exit 1 + ;; + esac +done + +# If version not specified, derive from tag +if [[ -z "$VERSION" ]]; then + # Remove 'v' prefix if present + VERSION=$(echo "$TAG" | sed 's/^v//') + if [[ "$VERSION" == "latest" ]]; then + VERSION="dev" + fi +fi + +# Get current date in ISO 8601 format +BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") + +# Get Git commit hash +VCS_REF=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown") + +echo "========================================================" +echo "Building Docker image for Export_Trakt_4_Letterboxd" +echo "========================================================" +echo "Image tag: $REGISTRY/$REPOSITORY:$TAG" +echo "Version: $VERSION" +echo "Build date: $BUILD_DATE" +echo "Git commit: $VCS_REF" +echo "Platforms: ${PLATFORMS:-local platform only}" +echo "Push images: $(if $NO_PUSH; then echo "No"; else echo "Yes"; fi)" +echo "========================================================" + +# Check if Docker 
buildx is available +if ! docker buildx version &>/dev/null; then + echo "Error: Docker buildx is not available. Please install it first." + exit 1 +fi + +# Create a new builder instance if not in local-only mode +if [[ "$LOCAL_ONLY" == "false" ]]; then + BUILDER_NAME="export-trakt-builder" + + # Check if builder exists, create if not + if ! docker buildx inspect "$BUILDER_NAME" &>/dev/null; then + echo "Creating new buildx builder: $BUILDER_NAME" + if [[ "$DRY_RUN" == "false" ]]; then + docker buildx create --name "$BUILDER_NAME" --use + else + echo "[DRY RUN] docker buildx create --name \"$BUILDER_NAME\" --use" + fi + else + echo "Using existing buildx builder: $BUILDER_NAME" + if [[ "$DRY_RUN" == "false" ]]; then + docker buildx use "$BUILDER_NAME" + else + echo "[DRY RUN] docker buildx use \"$BUILDER_NAME\"" + fi + fi +fi + +# Build command components +BUILD_ARGS=( + --build-arg "APP_VERSION=$VERSION" + --build-arg "BUILD_DATE=$BUILD_DATE" + --build-arg "VCS_REF=$VCS_REF" +) + +TAG_ARGS=( + -t "$REGISTRY/$REPOSITORY:$TAG" +) + +# Add latest tag if this is a version tag +if [[ "$TAG" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + TAG_ARGS+=(-t "$REGISTRY/$REPOSITORY:latest") +fi + +# Set platform args if not in local-only mode +PLATFORM_ARGS=() +if [[ "$LOCAL_ONLY" == "false" && -n "$PLATFORMS" ]]; then + PLATFORM_ARGS=(--platform "$PLATFORMS") +fi + +# Set output type based on push/no-push +OUTPUT_ARGS=() +if [[ "$NO_PUSH" == "true" ]]; then + if [[ "$LOCAL_ONLY" == "true" ]]; then + OUTPUT_ARGS=(--load) + else + OUTPUT_ARGS=(--output "type=image,push=false") + fi +else + OUTPUT_ARGS=(--push) +fi + +# Build the image +echo "Building Docker image..." +BUILD_CMD=(docker buildx build "${BUILD_ARGS[@]}" "${TAG_ARGS[@]}" "${PLATFORM_ARGS[@]}" "${OUTPUT_ARGS[@]}" .) + +if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] ${BUILD_CMD[*]}" +else + "${BUILD_CMD[@]}" + + echo "========================================================" + if [[ "$NO_PUSH" == "true" ]]; then + echo "Build completed. Images were not pushed." + else + echo "Build completed. Images were pushed to registry." 
+ fi + echo "========================================================" +fi + +exit 0 \ No newline at end of file diff --git a/cmd/export_trakt/main.go b/cmd/export_trakt/main.go deleted file mode 100644 index 28c3c8d..0000000 --- a/cmd/export_trakt/main.go +++ /dev/null @@ -1,581 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "os" - "os/signal" - "strings" - "syscall" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/scheduler" - "github.com/robfig/cron/v3" -) - -func main() { - // Add panic recovery to catch unhandled errors - defer func() { - if r := recover(); r != nil { - fmt.Printf("PANIC: %v\n", r) - os.Exit(1) - } - }() - - // Parse command line flags - configPath := flag.String("config", "config/config.toml", "Path to configuration file") - exportType := flag.String("export", "watched", "Type of export (watched, collection, shows, ratings, watchlist, all)") - exportMode := flag.String("mode", "normal", "Export mode (normal, initial, complete)") - runOnce := flag.Bool("run", false, "Run the script immediately once then exit") - scheduleFlag := flag.String("schedule", "", "Run the script according to cron schedule format (e.g., '0 */6 * * *' for every 6 hours)") - flag.Parse() - - // Get command from args - command := "export" // Default command - if len(flag.Args()) > 0 { - command = flag.Args()[0] - } - - // Initialize logger - log := logger.NewLogger() - - // Load configuration - log.Info("startup.loading_config", map[string]interface{}{"path": *configPath}) - cfg, err := config.LoadConfig(*configPath) - if err != nil { - log.Error("errors.config_load_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - // Configure logger based on config - log.SetLogLevel(cfg.Logging.Level) - if cfg.Logging.File != "" && os.Getenv("DISABLE_LOG_FILE") == "" { - if err := log.SetLogFile(cfg.Logging.File); err != nil { - log.Error("errors.log_file_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - } - - // Initialize translator - translator, err := i18n.NewTranslator(&cfg.I18n, log) - if err != nil { - log.Error("errors.translator_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - // Update logger to use translator - log.SetTranslator(translator) - - // Handle --run flag (immediate execution) - if *runOnce { - log.Info("startup.run_once_mode", map[string]interface{}{ - "export_type": *exportType, - "export_mode": *exportMode, - }) - runExportOnce(cfg, log, *exportType, *exportMode) - return - } - - // Handle --schedule flag (cron scheduling) - if *scheduleFlag != "" { - log.Info("startup.schedule_mode", map[string]interface{}{ - "schedule": *scheduleFlag, - "export_type": *exportType, - "export_mode": *exportMode, - }) - runWithSchedule(cfg, log, *scheduleFlag, *exportType, *exportMode) - return - } - - log.Info("startup.starting", map[string]interface{}{ - "command": command, - "mode": *exportMode, // Log the export mode - }) - log.Info("startup.config_loaded", nil) - - // Initialize Trakt client - traktClient := api.NewClient(cfg, log) - - // Process command - switch strings.ToLower(command) { - case "export": - // Initialize Letterboxd exporter - letterboxdExporter := export.NewLetterboxdExporter(cfg, log) 
- - // Log export mode - log.Info("export.mode", map[string]interface{}{ - "mode": *exportMode, - }) - - // Perform the export based on type - log.Info("export.starting_data_retrieval", map[string]interface{}{ - "export_type": *exportType, - }) - - switch *exportType { - case "watched": - exportWatchedMovies(traktClient, letterboxdExporter, log) - case "collection": - exportCollection(traktClient, letterboxdExporter, log) - case "shows": - exportShows(traktClient, letterboxdExporter, log) - case "ratings": - exportRatings(traktClient, letterboxdExporter, log) - case "watchlist": - exportWatchlist(traktClient, letterboxdExporter, log) - case "all": - exportWatchedMovies(traktClient, letterboxdExporter, log) - exportCollection(traktClient, letterboxdExporter, log) - exportShows(traktClient, letterboxdExporter, log) - exportRatings(traktClient, letterboxdExporter, log) - exportWatchlist(traktClient, letterboxdExporter, log) - default: - log.Error("errors.invalid_export_type", map[string]interface{}{"type": *exportType}) - fmt.Printf("Invalid export type: %s. Valid types are 'watched', 'collection', 'shows', 'ratings', 'watchlist', or 'all'\n", *exportType) - os.Exit(1) - } - - fmt.Println(translator.Translate("app.description", nil)) - - case "schedule": - // Initialize scheduler - sched := scheduler.NewScheduler(cfg, log) - - // Set export mode and type to environment variables for the scheduler - os.Setenv("EXPORT_MODE", *exportMode) - os.Setenv("EXPORT_TYPE", *exportType) - - // Start scheduler (this will block until the program is terminated) - if err := sched.Start(); err != nil { - log.Error("scheduler.start_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - // Block forever (or until SIGINT/SIGTERM) - select {} - - case "setup": - // Handle setup command - just inform for now - fmt.Println(translator.Translate("setup.instructions", nil)) - - case "validate": - // Validate the configuration - fmt.Println(translator.Translate("validate.success", nil)) - - default: - log.Error("errors.invalid_command", map[string]interface{}{"command": command}) - fmt.Printf("Invalid command: %s. 
Valid commands are 'export', 'schedule', 'setup', 'validate'\n", command) - os.Exit(1) - } -} - -func exportWatchedMovies(client *api.Client, exporter *export.LetterboxdExporter, log logger.Logger) { - // Get watched movies - log.Info("export.retrieving_watched_movies", nil) - movies, err := client.GetWatchedMovies() - if err != nil { - log.Error("errors.api_request_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - log.Info("export.movies_retrieved", map[string]interface{}{"count": len(movies)}) - - // If extended_info is set to "letterboxd", export in Letterboxd format - if client.GetConfig().Trakt.ExtendedInfo == "letterboxd" { - // Get ratings for Letterboxd format - log.Info("export.retrieving_ratings", nil) - ratings, err := client.GetRatings() - if err != nil { - log.Error("errors.api_request_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - log.Info("export.ratings_retrieved", map[string]interface{}{"count": len(ratings)}) - - // Export in Letterboxd format - log.Info("export.exporting_letterboxd_format", nil) - if err := exporter.ExportLetterboxdFormat(movies, ratings); err != nil { - log.Error("export.export_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - return - } - - // Export movies in standard format - log.Info("export.exporting_watched_movies", nil) - if err := exporter.ExportMovies(movies); err != nil { - log.Error("export.export_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } -} - -func exportCollection(client *api.Client, exporter *export.LetterboxdExporter, log logger.Logger) { - // Get collection movies - log.Info("export.retrieving_collection", nil) - movies, err := client.GetCollectionMovies() - if err != nil { - log.Error("errors.api_request_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - log.Info("export.collection_retrieved", map[string]interface{}{"count": len(movies)}) - - // Export collection - log.Info("export.exporting_collection", nil) - if err := exporter.ExportCollectionMovies(movies); err != nil { - log.Error("export.export_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } -} - -func exportShows(client *api.Client, exporter *export.LetterboxdExporter, log logger.Logger) { - // Get watched shows - log.Info("export.retrieving_watched_shows", nil) - shows, err := client.GetWatchedShows() - if err != nil { - log.Error("errors.api_request_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - // Count total episodes - episodeCount := 0 - for _, show := range shows { - for _, season := range show.Seasons { - episodeCount += len(season.Episodes) - } - } - - log.Info("export.shows_retrieved", map[string]interface{}{ - "shows": len(shows), - "episodes": episodeCount, - }) - - // Export shows - log.Info("export.exporting_shows", nil) - if err := exporter.ExportShows(shows); err != nil { - log.Error("export.export_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } -} - -func exportRatings(client *api.Client, exporter *export.LetterboxdExporter, log logger.Logger) { - // Get ratings - log.Info("export.retrieving_ratings", nil) - ratings, err := client.GetRatings() - if err != nil { - log.Error("errors.api_request_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - log.Info("export.ratings_retrieved", map[string]interface{}{"count": len(ratings)}) - - // Export ratings - log.Info("export.exporting_ratings", nil) - if err := 
exporter.ExportRatings(ratings); err != nil { - log.Error("export.export_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } -} - -func exportWatchlist(client *api.Client, exporter *export.LetterboxdExporter, log logger.Logger) { - // Get watchlist - log.Info("export.retrieving_watchlist", nil) - watchlist, err := client.GetWatchlist() - if err != nil { - log.Error("errors.api_request_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } - - log.Info("export.watchlist_retrieved", map[string]interface{}{"count": len(watchlist)}) - - // Export watchlist - log.Info("export.exporting_watchlist", nil) - if err := exporter.ExportWatchlist(watchlist); err != nil { - log.Error("export.export_failed", map[string]interface{}{"error": err.Error()}) - os.Exit(1) - } -} - -// runExportOnce executes the export once and then exits -func runExportOnce(cfg *config.Config, log logger.Logger, exportType, exportMode string) { - log.Info("export.starting_execution", map[string]interface{}{ - "export_type": exportType, - "export_mode": exportMode, - "timestamp": time.Now().Format(time.RFC3339), - }) - - // Initialize Trakt client - log.Info("export.initializing_trakt_client", nil) - traktClient := api.NewClient(cfg, log) - - // Initialize Letterboxd exporter - log.Info("export.initializing_letterboxd_exporter", nil) - letterboxdExporter := export.NewLetterboxdExporter(cfg, log) - - // Log export mode - log.Info("export.mode", map[string]interface{}{ - "mode": exportMode, - }) - - // Perform the export based on type - log.Info("export.starting_data_retrieval", map[string]interface{}{ - "export_type": exportType, - }) - - switch exportType { - case "watched": - log.Info("export.executing_watched_movies", nil) - exportWatchedMovies(traktClient, letterboxdExporter, log) - case "collection": - log.Info("export.executing_collection", nil) - exportCollection(traktClient, letterboxdExporter, log) - case "shows": - log.Info("export.executing_shows", nil) - exportShows(traktClient, letterboxdExporter, log) - case "ratings": - log.Info("export.executing_ratings", nil) - exportRatings(traktClient, letterboxdExporter, log) - case "watchlist": - log.Info("export.executing_watchlist", nil) - exportWatchlist(traktClient, letterboxdExporter, log) - case "all": - log.Info("export.executing_all_types", nil) - exportWatchedMovies(traktClient, letterboxdExporter, log) - exportCollection(traktClient, letterboxdExporter, log) - exportShows(traktClient, letterboxdExporter, log) - exportRatings(traktClient, letterboxdExporter, log) - exportWatchlist(traktClient, letterboxdExporter, log) - default: - log.Error("errors.invalid_export_type", map[string]interface{}{"type": exportType}) - fmt.Printf("Invalid export type: %s. 
Valid types are 'watched', 'collection', 'shows', 'ratings', 'watchlist', or 'all'\n", exportType) - os.Exit(1) - } - - log.Info("export.completed_successfully", map[string]interface{}{ - "export_type": exportType, - "export_mode": exportMode, - "timestamp": time.Now().Format(time.RFC3339), - }) -} - -// getConfiguredTimezone returns the configured timezone or UTC as fallback -func getConfiguredTimezone(cfg *config.Config, log logger.Logger) *time.Location { - // Try environment variable first (Docker TZ) - if tz := os.Getenv("TZ"); tz != "" { - if loc, err := time.LoadLocation(tz); err == nil { - log.Info("scheduler.using_env_timezone", map[string]interface{}{ - "timezone": tz, - }) - return loc - } - log.Warn("scheduler.invalid_env_timezone", map[string]interface{}{ - "timezone": tz, - }) - } - - // Try config timezone - if cfg.Export.Timezone != "" { - if loc, err := time.LoadLocation(cfg.Export.Timezone); err == nil { - log.Info("scheduler.using_config_timezone", map[string]interface{}{ - "timezone": cfg.Export.Timezone, - }) - return loc - } - log.Warn("scheduler.invalid_config_timezone", map[string]interface{}{ - "timezone": cfg.Export.Timezone, - }) - } - - // Fallback to UTC - log.Info("scheduler.using_default_timezone", map[string]interface{}{ - "timezone": "UTC", - }) - return time.UTC -} - -// runWithSchedule sets up a cron scheduler and runs the export according to the schedule -func runWithSchedule(cfg *config.Config, log logger.Logger, schedule, exportType, exportMode string) { - log.Info("scheduler.initializing", map[string]interface{}{ - "schedule": schedule, - "export_type": exportType, - "export_mode": exportMode, - }) - - // Check for verbose logging environment variable - if os.Getenv("EXPORT_VERBOSE") == "true" { - log.SetLogLevel("debug") - log.Info("scheduler.verbose_mode_enabled", nil) - } - - // Override log level if LOG_LEVEL environment variable is set - if logLevel := os.Getenv("LOG_LEVEL"); logLevel != "" { - log.SetLogLevel(logLevel) - log.Info("scheduler.log_level_set", map[string]interface{}{ - "level": logLevel, - }) - } - - // Get configured timezone for display - configuredTZ := getConfiguredTimezone(cfg, log) - - // Validate cron expression - cronParser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow) - _, err := cronParser.Parse(schedule) - if err != nil { - log.Error("errors.invalid_cron_schedule", map[string]interface{}{ - "schedule": schedule, - "error": err.Error(), - }) - fmt.Printf("Invalid cron schedule format: %s\nError: %s\n", schedule, err.Error()) - fmt.Println("Example formats:") - fmt.Println(" '0 */6 * * *' - Every 6 hours") - fmt.Println(" '0 9 * * 1' - Every Monday at 9:00 AM") - fmt.Println(" '30 14 * * *' - Every day at 2:30 PM") - os.Exit(1) - } - - log.Info("scheduler.cron_validation_successful", map[string]interface{}{ - "schedule": schedule, - }) - - // Create a new cron scheduler - c := cron.New() - - // Add the export job to the scheduler - entryID, err := c.AddFunc(schedule, func() { - log.Info("scheduler.job_triggered", map[string]interface{}{ - "schedule": schedule, - "export_type": exportType, - "export_mode": exportMode, - "timestamp": time.Now().Format(time.RFC3339), - }) - - // Run the export with additional logging - log.Info("scheduler.starting_export_execution", map[string]interface{}{ - "export_type": exportType, - "export_mode": exportMode, - }) - - startTime := time.Now() - runExportOnce(cfg, log, exportType, exportMode) - duration := time.Since(startTime) - - // Get next run time for 
display - entries := c.Entries() - var nextRunDisplay string - if len(entries) > 0 { - nextRun := entries[0].Next.In(configuredTZ) - nextRunDisplay = nextRun.Format("2006-01-02 15:04:05 MST") - } - - log.Info("scheduler.export_execution_completed", map[string]interface{}{ - "export_type": exportType, - "export_mode": exportMode, - "duration": duration.String(), - "next_run": nextRunDisplay, - }) - - // Display visual completion message with next run - fmt.Printf("\n✅ === EXPORT COMPLETED ===\n") - fmt.Printf("⏱️ Duration: %s\n", duration.String()) - fmt.Printf("▶️ Next run: %s\n", nextRunDisplay) - fmt.Printf("============================\n\n") - }) - - if err != nil { - log.Error("errors.scheduler_add_failed", map[string]interface{}{ - "schedule": schedule, - "error": err.Error(), - }) - fmt.Printf("Failed to add scheduled job: %s\n", err.Error()) - os.Exit(1) - } - - log.Info("scheduler.job_added_successfully", map[string]interface{}{ - "entry_id": entryID, - "schedule": schedule, - }) - - // Start the cron scheduler - c.Start() - log.Info("scheduler.cron_started", nil) - - // Get the next run time and display in configured timezone - entries := c.Entries() - if len(entries) > 0 { - nextRun := entries[0].Next - nextRunInTZ := nextRun.In(configuredTZ) - - log.Info("scheduler.started", map[string]interface{}{ - "schedule": schedule, - "entry_id": entryID, - "next_run": nextRun.Format(time.RFC3339), - "next_run_local": nextRunInTZ.Format("2006-01-02 15:04:05 MST"), - "timezone": configuredTZ.String(), - }) - fmt.Printf("\n🎯 === EXPORT SCHEDULER STARTED ===\n") - fmt.Printf("⏰ Schedule: %s\n", schedule) - fmt.Printf("📺 Export Type: %s\n", exportType) - fmt.Printf("🔧 Export Mode: %s\n", exportMode) - fmt.Printf("🌍 Timezone: %s\n", configuredTZ.String()) - fmt.Printf("▶️ Next run: %s\n", nextRunInTZ.Format("2006-01-02 15:04:05 MST")) - fmt.Printf("=====================================\n\n") - - // Log upcoming executions for the next hour in configured timezone - now := time.Now() - oneHourLater := now.Add(time.Hour) - log.Info("scheduler.upcoming_executions_preview", map[string]interface{}{ - "next_hour_from": now.Format(time.RFC3339), - "next_hour_to": oneHourLater.Format(time.RFC3339), - "timezone": configuredTZ.String(), - }) - - count := 0 - if len(entries) > 0 { - entry := entries[0] - nextExec := entry.Next - for nextExec.Before(oneHourLater) && count < 10 { - nextExecInTZ := nextExec.In(configuredTZ) - log.Info("scheduler.upcoming_execution", map[string]interface{}{ - "execution_time": nextExecInTZ.Format("2006-01-02 15:04:05 MST"), - "in_minutes": int(time.Until(nextExec).Minutes()), - "timezone": configuredTZ.String(), - }) - // Calculate next execution after this one - schedule, _ := cronParser.Parse(schedule) - nextExec = schedule.Next(nextExec) - count++ - } - } - } - - // Keep the program running until interrupted - log.Info("scheduler.waiting", map[string]interface{}{ - "message": "Scheduler is running. Press Ctrl+C to stop.", - "pid": os.Getpid(), - }) - fmt.Println("Scheduler is running. 
Press Ctrl+C to stop...") - - // Set up signal handling for graceful shutdown - sigChan := make(chan os.Signal, 1) - signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM) - - go func() { - sig := <-sigChan - log.Info("scheduler.shutdown_signal_received", map[string]interface{}{ - "signal": sig.String(), - }) - fmt.Printf("\nReceived signal %s, shutting down gracefully...\n", sig) - c.Stop() - log.Info("scheduler.shutdown_complete", nil) - os.Exit(0) - }() - - // Block forever (or until SIGINT/SIGTERM) - select {} -} \ No newline at end of file diff --git a/config/config.example.toml b/config/.config.example.toml old mode 100644 new mode 100755 similarity index 99% rename from config/config.example.toml rename to config/.config.example.toml index 57c5dc2..24c329e --- a/config/config.example.toml +++ b/config/.config.example.toml @@ -92,4 +92,4 @@ locales_dir = "locales" # 📖 For more options, check the project documentation # 🐛 Issues? Report them at: https://github.com/JohanDevl/Export_Trakt_4_Letterboxd/issues # -# ═══════════════════════════════════════════════════════════════════════════════ \ No newline at end of file +# ═══════════════════════════════════════════════════════════════════════════════ diff --git a/coverage.html b/coverage.html deleted file mode 100644 index e76d7e9..0000000 --- a/coverage.html +++ /dev/null @@ -1,819 +0,0 @@ - - - - - - export_trakt: Go Coverage Report - - - -
- [coverage.html: generated Go coverage report markup; legend: not tracked / not covered / covered]
- - - diff --git a/coverage.out b/coverage.out deleted file mode 100644 index e352e4c..0000000 --- a/coverage.out +++ /dev/null @@ -1,396 +0,0 @@ -mode: set -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:59.47,61.58 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:61.58,63.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:65.2,65.42 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:65.42,67.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:69.2,69.21 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:73.35,74.43 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:74.43,76.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:78.2,78.48 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:78.48,80.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:82.2,82.44 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:82.44,84.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:86.2,86.45 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:86.45,88.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:90.2,90.42 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:90.42,92.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:94.2,94.12 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:98.40,99.24 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:99.24,101.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:102.2,102.12 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:106.45,107.23 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:107.23,109.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:110.2,110.12 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:114.41,115.20 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:115.20,117.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:118.2,118.24 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:118.24,120.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:122.2,122.12 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:126.42,127.19 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:127.19,129.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:130.2,136.27 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:136.27,138.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:139.2,139.12 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:143.39,144.29 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:144.29,146.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:147.2,147.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:147.22,149.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:150.2,150.24 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:150.24,152.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:153.2,153.12 1 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:36.25,47.2 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:50.54,52.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:55.90,57.25 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:57.25,59.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:62.2,62.65 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:62.65,64.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:67.2,67.17 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:67.17,69.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:71.2,71.48 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:75.81,77.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:77.19,79.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:80.2,80.53 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:84.79,86.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:89.82,91.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:91.19,93.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:94.2,94.54 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:98.80,100.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:103.81,105.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:105.19,107.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:108.2,108.53 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:112.79,114.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:117.82,119.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:119.19,121.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:122.2,122.54 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:126.80,128.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:131.52,132.15 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:133.15,134.32 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:135.14,136.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:137.14,138.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:139.15,140.32 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:141.10,142.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:147.56,149.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:149.16,151.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:152.2,153.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:24.84,34.45 4 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:34.45,36.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:38.2,39.15 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:43.47,49.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:49.16,51.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:53.2,57.32 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:57.32,58.20 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:58.20,59.12 1 0 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:62.3,62.44 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:62.44,63.12 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:66.3,67.59 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:67.59,72.12 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:75.3,77.5 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:80.2,80.12 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:84.94,86.21 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:86.21,88.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:91.2,93.49 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:93.49,95.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:98.2,108.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:108.16,115.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:118.2,118.30 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:118.30,122.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:124.2,124.20 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:128.47,134.2 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:132.63,140.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:143.73,145.52 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:145.52,146.18 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:146.18,152.4 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:154.3,155.17 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:155.17,157.12 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:161.3,161.29 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:161.29,164.12 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:167.3,167.19 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:170.2,170.61 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:174.58,175.39 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:175.39,177.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:179.2,180.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:180.16,185.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:187.2,190.25 4 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:194.54,197.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:197.16,202.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:205.2,211.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:211.16,216.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:217.2,220.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:220.68,222.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:222.22,226.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:230.2,230.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:230.38,232.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:232.71,234.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:235.3,239.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:243.2,244.67 2 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:244.67,249.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:251.2,251.20 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:255.67,258.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:258.16,263.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:266.2,272.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:272.16,277.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:278.2,281.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:281.68,283.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:283.22,287.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:291.2,291.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:291.38,293.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:293.71,295.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:296.3,300.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:304.2,305.67 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:305.67,310.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:312.2,315.20 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:319.59,322.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:322.16,327.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:330.2,336.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:336.16,341.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:342.2,345.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:345.68,347.22 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:347.22,351.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:355.2,355.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:355.38,357.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:357.71,359.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:360.3,364.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:368.2,369.66 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:369.66,374.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:376.2,379.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:412.49,415.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:415.16,420.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:423.2,429.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:429.16,434.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:435.2,438.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:438.68,440.22 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:440.22,444.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:448.2,448.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:448.38,450.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:450.71,452.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:453.3,457.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:461.2,462.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:462.68,467.3 2 0 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:469.2,472.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:476.59,479.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:479.16,484.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:487.2,493.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:493.16,498.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:499.2,502.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:502.68,504.22 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:504.22,508.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:512.2,512.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:512.38,514.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:514.71,516.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:517.3,521.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:525.2,526.70 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:526.70,531.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:533.2,536.23 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:540.57,543.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:543.16,548.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:551.2,557.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:557.16,562.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:563.2,566.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:566.68,568.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:568.22,572.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:576.2,576.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:576.38,578.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:578.71,580.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:581.3,585.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:589.2,590.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:590.68,595.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:597.2,600.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:604.63,607.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:607.16,612.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:615.2,621.16 6 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:621.16,626.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:627.2,630.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:630.68,632.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:632.22,636.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:640.2,640.38 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:640.38,642.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:642.71,644.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:645.3,649.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:653.2,654.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:654.68,659.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:661.2,664.21 2 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:668.45,670.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:25.87,30.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:33.66,37.36 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:37.36,42.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:45.2,46.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:46.16,52.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:55.2,59.20 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:63.61,66.41 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:66.41,77.34 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:77.34,79.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:83.2,83.15 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:83.15,85.74 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:85.74,91.4 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:93.3,97.44 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:101.2,110.53 4 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:110.53,116.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:118.2,122.23 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:126.54,127.36 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:127.36,128.34 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:128.34,130.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:132.2,132.14 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:136.69,139.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:139.16,141.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:144.2,148.47 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:148.47,150.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:150.8,150.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:150.22,153.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:153.8,159.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:160.2,164.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:164.16,170.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:171.2,178.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:178.45,180.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:183.2,184.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:184.16,188.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:191.2,192.33 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:192.33,194.34 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:194.34,197.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:201.2,205.47 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:205.47,210.33 3 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:210.33,212.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:213.3,213.18 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:213.18,215.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:216.3,216.18 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:216.18,218.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:221.3,221.28 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:225.2,225.37 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:225.37,228.32 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:228.32,229.84 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:229.84,231.5 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:235.3,236.62 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:236.62,238.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:241.3,242.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:242.22,244.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:246.3,255.46 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:255.46,257.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:260.2,264.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:268.89,271.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:271.16,273.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:276.2,280.50 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:280.50,282.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:282.8,282.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:282.22,285.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:285.8,291.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:292.2,295.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:295.16,301.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:302.2,309.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:309.45,311.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:314.2,314.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:314.31,317.30 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:317.30,318.82 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:318.82,320.5 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:323.3,330.46 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:330.46,332.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:335.2,339.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:343.73,346.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:346.16,348.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:351.2,352.45 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:352.45,354.3 1 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:354.8,360.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:361.2,364.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:364.16,369.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:370.2,377.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:377.45,379.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:382.2,385.29 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:385.29,386.39 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:386.39,387.44 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:387.44,388.28 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:388.28,390.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:393.5,394.24 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:394.24,395.21 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:401.2,401.19 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:401.19,405.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:408.2,409.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:409.16,413.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:417.2,418.35 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:418.35,419.75 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:419.75,422.4 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:426.2,427.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:427.16,431.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:434.2,435.32 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:435.32,436.27 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:436.27,438.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:442.2,443.29 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:443.29,444.39 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:444.39,445.44 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:445.44,448.33 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:448.33,449.85 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:449.85,451.7 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:455.5,457.51 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:457.51,459.6 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:459.11,459.71 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:459.71,462.6 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:464.5,475.48 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:475.48,477.6 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:478.5,478.19 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:483.2,488.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:492.72,495.16 2 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:495.16,497.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:500.2,504.47 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:504.47,506.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:506.8,506.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:506.22,509.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:509.8,515.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:516.2,519.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:519.16,525.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:526.2,533.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:533.45,535.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:538.2,538.28 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:538.28,541.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:541.22,542.74 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:542.74,544.5 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:548.3,549.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:549.19,551.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:553.3,561.46 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:561.46,563.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:566.2,570.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:574.84,577.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:577.16,579.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:582.2,586.49 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:586.49,588.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:588.8,588.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:588.22,591.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:591.8,597.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:598.2,601.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:601.16,607.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:608.2,615.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:615.45,617.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:620.2,620.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:620.31,623.24 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:623.24,624.76 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:624.76,626.5 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:629.3,637.46 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:637.46,639.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:642.2,646.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:652.101,655.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:655.16,657.3 1 0 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:660.2,664.56 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:664.56,666.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:666.8,666.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:666.22,669.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:669.8,671.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:672.2,675.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:675.16,681.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:682.2,689.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:689.45,691.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:694.2,695.33 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:695.33,697.34 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:697.34,699.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:703.2,704.31 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:704.31,705.33 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:705.33,707.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:711.2,711.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:711.31,714.32 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:714.32,715.84 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:715.84,717.5 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:721.3,722.62 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:722.62,724.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:727.3,728.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:728.22,730.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:733.3,745.46 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:745.46,747.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:750.2,754.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:758.67,762.2 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:765.75,769.2 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:772.81,776.2 2 1 diff --git a/coverage.txt b/coverage.txt deleted file mode 100644 index 696fecc..0000000 --- a/coverage.txt +++ /dev/null @@ -1,396 +0,0 @@ -mode: atomic -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:24.84,34.45 4 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:34.45,36.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:38.2,39.15 2 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:43.47,49.16 3 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:49.16,51.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:53.2,57.32 2 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:57.32,58.20 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:58.20,59.12 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:62.3,62.44 1 5 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:62.44,63.12 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:66.3,67.59 2 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:67.59,72.12 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:75.3,77.5 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:80.2,80.12 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:84.94,86.21 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:86.21,88.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:91.2,93.49 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:93.49,95.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:98.2,108.16 3 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:108.16,115.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:118.2,118.30 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:118.30,122.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:124.2,124.20 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/i18n/i18n.go:128.47,134.2 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:59.47,61.58 2 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:61.58,63.3 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:65.2,65.42 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:65.42,67.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:69.2,69.21 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:73.35,74.43 1 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:74.43,76.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:78.2,78.48 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:78.48,80.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:82.2,82.44 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:82.44,84.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:86.2,86.45 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:86.45,88.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:90.2,90.42 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:90.42,92.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:94.2,94.12 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:98.40,99.24 1 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:99.24,101.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:102.2,102.12 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:106.45,107.23 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:107.23,109.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:110.2,110.12 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:114.41,115.20 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:115.20,117.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:118.2,118.24 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:118.24,120.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:122.2,122.12 1 4 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:126.42,127.19 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:127.19,129.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:130.2,136.27 2 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:136.27,138.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:139.2,139.12 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:143.39,144.29 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:144.29,146.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:147.2,147.22 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:147.22,149.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:150.2,150.24 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:150.24,152.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config/config.go:153.2,153.12 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:36.25,47.2 5 12 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:50.54,52.2 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:55.90,57.25 1 14 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:57.25,59.3 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:62.2,62.65 1 11 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:62.65,64.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:67.2,67.17 1 11 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:67.17,69.3 1 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:71.2,71.48 1 11 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:75.81,77.19 2 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:77.19,79.3 1 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:80.2,80.53 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:84.79,86.2 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:89.82,91.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:91.19,93.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:94.2,94.54 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:98.80,100.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:103.81,105.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:105.19,107.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:108.2,108.53 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:112.79,114.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:117.82,119.19 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:119.19,121.3 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:122.2,122.54 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:126.80,128.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:131.52,132.15 1 9 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:133.15,134.32 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:135.14,136.31 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:137.14,138.31 1 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:139.15,140.32 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:141.10,142.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:147.56,149.16 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:149.16,151.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger/logger.go:152.2,153.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:132.63,140.2 1 24 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:143.73,145.52 2 21 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:145.52,146.18 1 23 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:146.18,152.4 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:154.3,155.17 2 23 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:155.17,157.12 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:161.3,161.29 1 23 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:161.29,164.12 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:167.3,167.19 1 21 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:170.2,170.61 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:174.58,175.39 1 24 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:175.39,177.3 1 22 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:179.2,180.16 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:180.16,185.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:187.2,190.25 4 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:194.54,197.16 3 9 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:197.16,202.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:205.2,211.16 6 9 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:211.16,216.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:217.2,220.68 2 9 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:220.68,222.22 2 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:222.22,226.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:230.2,230.38 1 9 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:230.38,232.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:232.71,234.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:235.3,239.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:243.2,244.67 2 8 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:244.67,249.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:251.2,251.20 1 8 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:255.67,258.16 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:258.16,263.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:266.2,272.16 6 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:272.16,277.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:278.2,281.68 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:281.68,283.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:283.22,287.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:291.2,291.38 1 2 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:291.38,293.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:293.71,295.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:296.3,300.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:304.2,305.67 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:305.67,310.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:312.2,315.20 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:319.59,322.16 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:322.16,327.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:330.2,336.16 6 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:336.16,341.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:342.2,345.68 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:345.68,347.22 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:347.22,351.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:355.2,355.38 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:355.38,357.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:357.71,359.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:360.3,364.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:368.2,369.66 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:369.66,374.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:376.2,379.19 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:412.49,415.16 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:415.16,420.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:423.2,429.16 6 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:429.16,434.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:435.2,438.68 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:438.68,440.22 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:440.22,444.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:448.2,448.38 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:448.38,450.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:450.71,452.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:453.3,457.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:461.2,462.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:462.68,467.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:469.2,472.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:476.59,479.16 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:479.16,484.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:487.2,493.16 6 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:493.16,498.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:499.2,502.68 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:502.68,504.22 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:504.22,508.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:512.2,512.38 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:512.38,514.71 2 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:514.71,516.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:517.3,521.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:525.2,526.70 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:526.70,531.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:533.2,536.23 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:540.57,543.16 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:543.16,548.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:551.2,557.16 6 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:557.16,562.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:563.2,566.68 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:566.68,568.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:568.22,572.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:576.2,576.38 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:576.38,578.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:578.71,580.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:581.3,585.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:589.2,590.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:590.68,595.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:597.2,600.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:604.63,607.16 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:607.16,612.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:615.2,621.16 6 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:621.16,626.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:627.2,630.68 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:630.68,632.22 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:632.22,636.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:640.2,640.38 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:640.38,642.71 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:642.71,644.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:645.3,649.103 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:653.2,654.68 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:654.68,659.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:661.2,664.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api/trakt.go:668.45,670.2 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:25.87,30.2 1 11 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:33.66,37.36 2 4 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:37.36,42.3 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:45.2,46.16 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:46.16,52.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:55.2,59.20 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:63.61,66.41 2 7 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:66.41,77.34 2 7 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:77.34,79.4 1 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:83.2,83.15 1 7 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:83.15,85.74 1 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:85.74,91.4 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:93.3,97.44 2 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:101.2,110.53 4 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:110.53,116.3 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:118.2,122.23 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:126.54,127.36 1 6 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:127.36,128.34 1 12 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:128.34,130.4 1 5 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:132.2,132.14 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:136.69,139.16 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:139.16,141.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:144.2,148.47 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:148.47,150.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:150.8,150.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:150.22,153.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:153.8,159.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:160.2,164.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:164.16,170.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:171.2,178.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:178.45,180.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:183.2,184.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:184.16,188.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:191.2,192.33 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:192.33,194.34 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:194.34,197.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:201.2,205.47 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:205.47,210.33 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:210.33,212.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:213.3,213.18 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:213.18,215.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:216.3,216.18 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:216.18,218.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:221.3,221.28 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:225.2,225.37 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:225.37,228.32 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:228.32,229.84 1 2 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:229.84,231.5 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:235.3,236.62 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:236.62,238.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:241.3,242.22 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:242.22,244.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:246.3,255.46 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:255.46,257.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:260.2,264.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:268.89,271.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:271.16,273.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:276.2,280.50 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:280.50,282.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:282.8,282.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:282.22,285.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:285.8,291.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:292.2,295.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:295.16,301.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:302.2,309.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:309.45,311.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:314.2,314.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:314.31,317.30 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:317.30,318.82 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:318.82,320.5 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:323.3,330.46 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:330.46,332.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:335.2,339.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:343.73,346.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:346.16,348.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:351.2,352.45 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:352.45,354.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:354.8,360.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:361.2,364.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:364.16,369.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:370.2,377.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:377.45,379.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:382.2,385.29 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:385.29,386.39 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:386.39,387.44 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:387.44,388.28 1 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:388.28,390.21 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:393.5,394.24 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:394.24,395.21 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:401.2,401.19 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:401.19,405.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:408.2,409.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:409.16,413.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:417.2,418.35 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:418.35,419.75 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:419.75,422.4 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:426.2,427.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:427.16,431.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:434.2,435.32 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:435.32,436.27 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:436.27,438.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:442.2,443.29 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:443.29,444.39 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:444.39,445.44 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:445.44,448.33 2 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:448.33,449.85 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:449.85,451.7 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:455.5,457.51 3 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:457.51,459.6 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:459.11,459.71 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:459.71,462.6 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:464.5,475.48 2 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:475.48,477.6 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:478.5,478.19 1 3 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:483.2,488.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:492.72,495.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:495.16,497.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:500.2,504.47 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:504.47,506.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:506.8,506.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:506.22,509.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:509.8,515.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:516.2,519.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:519.16,525.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:526.2,533.45 5 1 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:533.45,535.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:538.2,538.28 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:538.28,541.22 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:541.22,542.74 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:542.74,544.5 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:548.3,549.19 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:549.19,551.4 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:553.3,561.46 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:561.46,563.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:566.2,570.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:574.84,577.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:577.16,579.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:582.2,586.49 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:586.49,588.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:588.8,588.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:588.22,591.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:591.8,597.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:598.2,601.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:601.16,607.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:608.2,615.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:615.45,617.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:620.2,620.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:620.31,623.24 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:623.24,624.76 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:624.76,626.5 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:629.3,637.46 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:637.46,639.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:642.2,646.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:652.101,655.16 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:655.16,657.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:660.2,664.56 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:664.56,666.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:666.8,666.22 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:666.22,669.3 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:669.8,671.3 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:672.2,675.16 3 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:675.16,681.3 2 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:682.2,689.45 5 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:689.45,691.3 1 0 
-github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:694.2,695.33 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:695.33,697.34 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:697.34,699.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:703.2,704.31 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:704.31,705.33 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:705.33,707.4 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:711.2,711.31 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:711.31,714.32 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:714.32,715.84 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:715.84,717.5 1 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:721.3,722.62 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:722.62,724.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:727.3,728.22 2 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:728.22,730.4 1 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:733.3,745.46 3 2 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:745.46,747.4 1 0 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:750.2,754.12 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:758.67,762.2 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:765.75,769.2 2 1 -github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/export/letterboxd.go:772.81,776.2 2 1 diff --git a/docker-compose.secrets.yml b/docker-compose.secrets.yml new file mode 100644 index 0000000..5249497 --- /dev/null +++ b/docker-compose.secrets.yml @@ -0,0 +1,43 @@ +version: "3.8" + +services: + # Example using Docker Secrets for sensitive configuration + trakt-export-secrets: + image: johandevl/export-trakt-4-letterboxd:latest + container_name: trakt-export-secrets + volumes: + - ./config:/app/config + - ./logs:/app/logs + - ./copy:/app/copy + - ./backup:/app/backup + environment: + - TZ=Europe/Paris + - CRON_SCHEDULE=0 3 * * * + - EXPORT_OPTION=normal + # Set environment variables to read from Docker secrets + - TRAKT_API_KEY_FILE=/run/secrets/trakt_api_key + - TRAKT_API_SECRET_FILE=/run/secrets/trakt_api_secret + - TRAKT_ACCESS_TOKEN_FILE=/run/secrets/trakt_access_token + - TRAKT_REFRESH_TOKEN_FILE=/run/secrets/trakt_refresh_token + - TRAKT_USERNAME=your_username + secrets: + - trakt_api_key + - trakt_api_secret + - trakt_access_token + - trakt_refresh_token + restart: unless-stopped + healthcheck: + test: ["CMD", "/app/docker-entrypoint.sh", "healthcheck"] + interval: 1m + timeout: 10s + retries: 3 + +secrets: + trakt_api_key: + file: ./secrets/api_key.txt + trakt_api_secret: + file: ./secrets/api_secret.txt + trakt_access_token: + file: ./secrets/access_token.txt + trakt_refresh_token: + file: ./secrets/refresh_token.txt diff --git a/docker-compose.yml b/docker-compose.yml index 655e920..17f974a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,207 +1,124 @@ +version: "3.8" + services: - # Base service configuration for PRODUCTION (pulls from registry) - export-trakt-base: &export-trakt-base-prod - image: ghcr.io/johandevl/export_trakt_4_letterboxd:latest - pull_policy: always + # Default 
service - runs once and exits + trakt-export: + build: + context: . + args: + - APP_VERSION=dev + - BUILD_DATE=${BUILD_DATE:-unknown} + - VCS_REF=${VCS_REF:-unknown} + container_name: trakt-export volumes: - ./config:/app/config - ./logs:/app/logs - - ./exports:/app/exports - restart: "no" + - ./copy:/app/copy + - ./backup:/app/backup environment: - TZ=Europe/Paris - - # Base service configuration for DEVELOPMENT/TEST (local build) - export-trakt-base-dev: &export-trakt-base-dev + # Leave CRON_SCHEDULE empty to run once and exit + - EXPORT_OPTION=complete + restart: "no" + stdin_open: true + tty: true + healthcheck: + test: ["CMD", "/app/docker-entrypoint.sh", "healthcheck"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 5s + + # Service that runs on a schedule using cron + trakt-export-scheduled: + profiles: ["scheduled"] build: context: . - dockerfile: Dockerfile args: - VERSION: local-dev - COMMIT_SHA: local - BUILD_DATE: "2025-05-23T12:30:00Z" + - APP_VERSION=dev + container_name: trakt-export-scheduled volumes: - ./config:/app/config - ./logs:/app/logs - - ./exports:/app/exports - restart: "no" + - ./copy:/app/copy + - ./backup:/app/backup environment: - TZ=Europe/Paris - - # === PRODUCTION SERVICES (using registry image) === - - # Normal export (production) - export-trakt: - <<: *export-trakt-base-prod - profiles: ["default", "export", "prod"] - command: ["export", "--mode", "normal"] - - # Production scheduler - Every 6 hours (recommended) - export-trakt-schedule-6h: - <<: *export-trakt-base-prod - profiles: ["schedule", "schedule-6h", "prod"] - container_name: export-trakt-schedule-6h - restart: unless-stopped - environment: - - TZ=Europe/Paris - - LOG_LEVEL=info - - EXPORT_QUIET_MODE=true - command: - ["--schedule", "0 */6 * * *", "--export", "all", "--mode", "complete"] - - # Production scheduler - Daily at 2:30 AM - export-trakt-schedule-daily: - <<: *export-trakt-base-prod - profiles: ["schedule-daily", "prod"] - container_name: export-trakt-schedule-daily + - CRON_SCHEDULE=* * * * * + - EXPORT_OPTION=complete restart: unless-stopped + healthcheck: + test: ["CMD", "/app/docker-entrypoint.sh", "healthcheck"] + interval: 1m + timeout: 10s + retries: 3 + start_period: 30s + + # Service that runs on a schedule using cron + trakt-export-scheduled-repo: + #image: ghcr.io/johandevl/export_trakt_4_letterboxd:pr-29 + build: + context: . 
+ args: + - APP_VERSION=dev + container_name: trakt-export-scheduled-repo + volumes: + - ./config:/app/config + - ./logs:/app/logs + - ./copy:/app/copy + - ./backup:/app/backup environment: - TZ=Europe/Paris - - LOG_LEVEL=info - - EXPORT_QUIET_MODE=true - command: - ["--schedule", "30 2 * * *", "--export", "all", "--mode", "complete"] - - # Production run once - all data - export-trakt-run-all: - <<: *export-trakt-base-prod - profiles: ["run", "run-all", "prod"] - command: ["--run", "--export", "all", "--mode", "complete"] - - # === DEVELOPMENT/TEST SERVICES (using local build) === - - # Interactive setup (dev) - export-trakt-setup-dev: - <<: *export-trakt-base-dev - profiles: ["setup", "dev"] - command: ["setup"] - tty: true - stdin_open: true - - # Test scheduler - High frequency (every 2 minutes) for testing - export-trakt-schedule-test: - <<: *export-trakt-base-dev - profiles: ["schedule-test", "schedule-15min", "dev"] - container_name: export-trakt-schedule-test + # Run every day at 3 AM + - CRON_SCHEDULE=* * * * * + - EXPORT_OPTION=normal restart: unless-stopped + healthcheck: + test: ["CMD", "/app/docker-entrypoint.sh", "healthcheck"] + interval: 1m + timeout: 10s + retries: 3 + start_period: 30s + + # Example with all configuration passed via environment variables + trakt-export-env: + profiles: ["env-config"] + image: johandevl/export-trakt-4-letterboxd:latest + container_name: trakt-export-env + volumes: + - trakt_logs:/app/logs + - trakt_copy:/app/copy + - trakt_backup:/app/backup environment: - TZ=Europe/Paris - - LOG_LEVEL=info - - EXPORT_QUIET_MODE=true - command: - ["--schedule", "*/2 * * * *", "--export", "watched", "--mode", "normal"] - - # Test run once - watched movies only (dev) - export-trakt-run-watched-dev: - <<: *export-trakt-base-dev - profiles: ["run-watched", "dev"] - command: ["--run", "--export", "watched", "--mode", "normal"] - - # Test run once - all data (dev) - export-trakt-run-all-dev: - <<: *export-trakt-base-dev - profiles: ["run-all", "dev"] - command: ["--run", "--export", "all", "--mode", "complete"] - - # Validate configuration (dev) - export-trakt-validate-dev: - <<: *export-trakt-base-dev - profiles: ["validate", "dev"] - command: ["validate"] - - # === LEGACY SERVICES (for backward compatibility) === - - # Complete export (legacy - production) - export-trakt-complete: - <<: *export-trakt-base-prod - profiles: ["complete", "legacy"] - command: ["export", "--mode", "complete", "--export", "all"] - - # Initial export (legacy - production) - export-trakt-initial: - <<: *export-trakt-base-prod - profiles: ["initial", "legacy"] - command: ["export", "--mode", "initial"] - - # Custom scheduled export (production) - export-trakt-schedule-custom: - <<: *export-trakt-base-prod - profiles: ["schedule-custom", "prod"] - container_name: export-trakt-schedule-custom + # API configuration - replace with your actual values + - TRAKT_API_KEY=${TRAKT_API_KEY} + - TRAKT_API_SECRET=${TRAKT_API_SECRET} + - TRAKT_ACCESS_TOKEN=${TRAKT_ACCESS_TOKEN} + - TRAKT_REFRESH_TOKEN=${TRAKT_REFRESH_TOKEN} + - TRAKT_USERNAME=${TRAKT_USERNAME} + # Schedule and export options + - CRON_SCHEDULE=0 3 * * * + - EXPORT_OPTION=normal restart: unless-stopped + healthcheck: + test: ["CMD", "/app/docker-entrypoint.sh", "healthcheck"] + interval: 1m + timeout: 10s + retries: 3 + + # Setup command - use this to run the initial setup + trakt-setup: + profiles: ["setup"] + build: . 
+ container_name: trakt-setup + volumes: + - ./config:/app/config environment: - TZ=Europe/Paris - - LOG_LEVEL=info - - EXPORT_QUIET_MODE=true - - CUSTOM_SCHEDULE=${SCHEDULE:-0 */6 * * *} - - CUSTOM_EXPORT_TYPE=${EXPORT_TYPE:-all} - - CUSTOM_EXPORT_MODE=${EXPORT_MODE:-complete} - entrypoint: ["/bin/sh", "-c"] - command: - - | - /app/export-trakt --schedule "$${CUSTOM_SCHEDULE}" --export "$${CUSTOM_EXPORT_TYPE}" --mode "$${CUSTOM_EXPORT_MODE}" - -# === USAGE EXAMPLES === -# -# 🚀 PRODUCTION MODE (pulls from GitHub Container Registry): -# -# Run production scheduler (every 6 hours): -# docker compose --profile schedule-6h up -d -# -# Run production daily scheduler (2:30 AM): -# docker compose --profile schedule-daily up -d -# -# Run once - all data (production): -# docker compose --profile run-all up -# -# Custom production schedule: -# SCHEDULE="0 */4 * * *" docker compose --profile schedule-custom up -d -# -# 🛠️ DEVELOPMENT/TEST MODE (builds locally): -# -# Test scheduler (every 2 minutes): -# docker compose --profile dev --profile schedule-test up -d --build -# -# Test run once - watched movies: -# docker compose --profile dev --profile run-watched up --build -# -# Test run once - all data: -# docker compose --profile dev --profile run-all up --build -# -# Interactive setup: -# docker compose --profile dev --profile setup up --build -# -# Validate configuration: -# docker compose --profile dev --profile validate up --build -# -# 🔄 SWITCHING MODES: -# -# Stop test scheduler and start production: -# docker compose --profile dev --profile schedule-test down -# docker compose --profile schedule-6h up -d -# -# Stop production and start development: -# docker compose --profile schedule-6h down -# docker compose --profile dev --profile schedule-test up -d --build -# -# 📊 MONITORING: -# -# Check production logs: -# docker compose --profile schedule-6h logs -f -# -# Check development logs: -# docker compose --profile dev --profile schedule-test logs -f -# -# 🎯 QUICK COMMANDS: -# -# Test your changes: -# docker compose --profile dev --profile run-watched up --build -# -# Deploy to production: -# docker compose --profile schedule-6h up -d -# -# Check status: -# docker compose ps + entrypoint: ["/app/docker-entrypoint.sh", "setup"] + stdin_open: true # Needed for interactive prompts + tty: true # Needed for interactive prompts volumes: trakt_logs: diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh new file mode 100755 index 0000000..61cafb5 --- /dev/null +++ b/docker-entrypoint.sh @@ -0,0 +1,646 @@ +#!/bin/bash +set -e + +# Improved logging function +log_message() { + local level="$1" + local message="$2" + local timestamp=$(date "+%Y-%m-%d %H:%M:%S") + + case "$level" in + "INFO") echo -e "ℹ️ [INFO] $timestamp - $message" ;; + "WARN") echo -e "⚠️ [WARNING] $timestamp - $message" ;; + "ERROR") echo -e "❌ [ERROR] $timestamp - $message" ;; + "DEBUG") echo -e "🔍 [DEBUG] $timestamp - $message" ;; + "SUCCESS") echo -e "✅ [SUCCESS] $timestamp - $message" ;; + esac +} + +# Show version information +show_version() { + log_message "INFO" "Starting Export Trakt 4 Letterboxd container - Version: ${APP_VERSION:-unknown}" +} + +# Health check HTTP server +start_health_server() { + # Check if netcat-openbsd is installed + if ! command -v nc &> /dev/null; then + log_message "WARN" "Netcat not installed. Health server not available. Installing..." + if command -v apk &> /dev/null; then + apk add --no-cache netcat-openbsd + else + log_message "ERROR" "Package manager not found. Cannot install netcat." 
+ return 1 + fi + fi + + # Source the health check script + source /app/lib/health_check.sh + + # Start health check server + log_message "INFO" "Starting health check server on port 8000" + + # Run in background with BusyBox compatible options + ( + while true; do + # For BusyBox nc, we need to use different syntax + # Write the HTTP response to a temporary file first + echo -e "HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n$(run_health_checks)" > /tmp/health_response + # Start netcat in listen mode + nc -l -p 8000 < /tmp/health_response + # Small delay to avoid CPU spinning + sleep 0.1 + done + ) & + + # Store PID to kill server when container stops + HEALTH_SERVER_PID=$! + log_message "INFO" "Health check server started with PID: $HEALTH_SERVER_PID" + + # Register trap to kill server on exit + trap "log_message 'INFO' 'Stopping health check server'; kill $HEALTH_SERVER_PID 2>/dev/null || true" EXIT INT TERM +} + +# Debug function for file and directory information +debug_file_info() { + local path="$1" + local type="$2" + + if [ -e "$path" ]; then + log_message "DEBUG" "$type exists: $path" + if [ -d "$path" ]; then + log_message "DEBUG" "Directory permissions: $(stat -c '%a %n' "$path" 2>/dev/null || ls -la "$path" | head -n 1)" + log_message "DEBUG" "Owner/Group: $(stat -c '%U:%G' "$path" 2>/dev/null || ls -la "$path" | head -n 1 | awk '{print $3":"$4}')" + log_message "DEBUG" "Content count: $(ls -la "$path" | wc -l) items" + elif [ -f "$path" ]; then + log_message "DEBUG" "File permissions: $(stat -c '%a %n' "$path" 2>/dev/null || ls -la "$path" | head -n 1)" + log_message "DEBUG" "Owner/Group: $(stat -c '%U:%G' "$path" 2>/dev/null || ls -la "$path" | head -n 1 | awk '{print $3":"$4}')" + log_message "DEBUG" "File size: $(stat -c '%s' "$path" 2>/dev/null || ls -la "$path" | awk '{print $5}') bytes" + + if [ -s "$path" ]; then + log_message "DEBUG" "File has content" + else + log_message "WARN" "File is empty" + fi + + if [ -r "$path" ]; then + log_message "DEBUG" "File is readable" + else + log_message "ERROR" "File is not readable" + fi + + if [ -w "$path" ]; then + log_message "DEBUG" "File is writable" + else + log_message "ERROR" "File is not writable" + fi + fi + else + log_message "ERROR" "$type does not exist: $path" + log_message "DEBUG" "Parent directory exists: $(if [ -d "$(dirname "$path")" ]; then echo "Yes"; else echo "No"; fi)" + if [ -d "$(dirname "$path")" ]; then + log_message "DEBUG" "Parent directory permissions: $(stat -c '%a %n' "$(dirname "$path")" 2>/dev/null || ls -la "$(dirname "$path")" | head -n 1)" + fi + fi +} + +# Function to clean temporary directories safely +clean_temp_directories() { + log_message "INFO" "Cleaning temporary directories..." 
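+    # /app/TEMP holds intermediate export files (e.g. watched_raw_output.csv);
+    # it is cleared here so each run starts from an empty scratch area.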
+ + # Define temp directories to clean + TEMP_DIRS=("/app/TEMP") + + for dir in "${TEMP_DIRS[@]}"; do + if [ -d "$dir" ]; then + log_message "DEBUG" "Cleaning directory: $dir" + + # First try with current permissions + if find "$dir" -mindepth 1 -delete 2>/dev/null; then + log_message "SUCCESS" "Cleaned $dir successfully" + else + log_message "WARN" "Permission issues cleaning $dir, attempting with elevated permissions" + + # Try to make the directory writable if needed + chmod -R 777 "$dir" 2>/dev/null || true + find "$dir" -mindepth 1 -delete 2>/dev/null || log_message "ERROR" "Failed to clean $dir completely" + + # Make sure the directory exists and has correct permissions + mkdir -p "$dir" 2>/dev/null || true + chmod -R 777 "$dir" 2>/dev/null || true + fi + else + log_message "WARN" "Directory $dir does not exist, creating it" + mkdir -p "$dir" 2>/dev/null || log_message "ERROR" "Failed to create $dir" + chmod -R 777 "$dir" 2>/dev/null || log_message "ERROR" "Failed to set permissions on $dir" + fi + done +} + +# Function to sync environment variables with config file +sync_env_to_config() { + local config_file="/app/config/.config.cfg" + + log_message "INFO" "Checking for environment variables to sync to config..." + + # Check if config file is writable + if [ ! -w "$config_file" ]; then + log_message "WARN" "Config file is not writable: $config_file" + log_message "INFO" "Attempting to make config file writable" + chmod 666 "$config_file" 2>/dev/null || log_message "ERROR" "Failed to make config file writable" + fi + + # Re-check if it's writable + if [ ! -w "$config_file" ]; then + log_message "ERROR" "Cannot write to config file: $config_file" + log_message "INFO" "Will use environment variables directly without updating config file" + return 1 + fi + + # Create a temp file for safer editing + local temp_config="/tmp/config.tmp" + cp "$config_file" "$temp_config" + + # List of environment variables to check and sync + declare -A env_vars + env_vars[TRAKT_API_KEY]="API_KEY" + env_vars[TRAKT_API_SECRET]="API_SECRET" + env_vars[TRAKT_USERNAME]="USERNAME" + + # Special handling for tokens - only update if they are empty in the config + # Get current values from config using grep with awk which is more compatible + current_access_token=$(grep "^ACCESS_TOKEN=" "$config_file" | awk -F '"' '{print $2}' || echo "") + current_refresh_token=$(grep "^REFRESH_TOKEN=" "$config_file" | awk -F '"' '{print $2}' || echo "") + + # Only update tokens if they are empty in the config + if [ -z "$current_access_token" ] && [ -n "$TRAKT_ACCESS_TOKEN" ]; then + log_message "INFO" "Setting ACCESS_TOKEN from environment variable" + sed -i 's|^ACCESS_TOKEN=.*|ACCESS_TOKEN="'"$TRAKT_ACCESS_TOKEN"'"|' "$temp_config" + fi + + if [ -z "$current_refresh_token" ] && [ -n "$TRAKT_REFRESH_TOKEN" ]; then + log_message "INFO" "Setting REFRESH_TOKEN from environment variable" + sed -i 's|^REFRESH_TOKEN=.*|REFRESH_TOKEN="'"$TRAKT_REFRESH_TOKEN"'"|' "$temp_config" + fi + + # Check each environment variable (except tokens which are handled above) + for env_var in "${!env_vars[@]}"; do + config_var="${env_vars[$env_var]}" + + # If environment variable is set, update config + if [ -n "${!env_var}" ]; then + log_message "INFO" "Setting $config_var from environment variable $env_var" + + if grep -q "^$config_var=" "$temp_config"; then + # Update existing variable - preserve format, just update value + sed -i "s|^$config_var=.*|$config_var=\"${!env_var}\"|" "$temp_config" + else + # Add new variable (should rarely happen) + 
echo "$config_var=\"${!env_var}\"" >> "$temp_config" + fi + fi + done + + # Also check for environment variables with _FILE suffix for Docker secrets + # Special handling for token secrets + if [ -n "$TRAKT_ACCESS_TOKEN_FILE" ] && [ -f "$TRAKT_ACCESS_TOKEN_FILE" ] && [ -z "$current_access_token" ]; then + secret_value=$(cat "$TRAKT_ACCESS_TOKEN_FILE" 2>/dev/null | tr -d '\n') + if [ -n "$secret_value" ]; then + log_message "INFO" "Setting ACCESS_TOKEN from secret file" + sed -i 's|^ACCESS_TOKEN=.*|ACCESS_TOKEN="'"$secret_value"'"|' "$temp_config" + fi + fi + + if [ -n "$TRAKT_REFRESH_TOKEN_FILE" ] && [ -f "$TRAKT_REFRESH_TOKEN_FILE" ] && [ -z "$current_refresh_token" ]; then + secret_value=$(cat "$TRAKT_REFRESH_TOKEN_FILE" 2>/dev/null | tr -d '\n') + if [ -n "$secret_value" ]; then + log_message "INFO" "Setting REFRESH_TOKEN from secret file" + sed -i 's|^REFRESH_TOKEN=.*|REFRESH_TOKEN="'"$secret_value"'"|' "$temp_config" + fi + fi + + # For other secrets + for env_var in "${!env_vars[@]}"; do + secret_env_var="${env_var}_FILE" + config_var="${env_vars[$env_var]}" + + # If secret file environment variable is set + if [ -n "${!secret_env_var}" ] && [ -f "${!secret_env_var}" ]; then + # Read the secret from file + secret_value=$(cat "${!secret_env_var}" 2>/dev/null | tr -d '\n') + + if [ -n "$secret_value" ]; then + log_message "INFO" "Setting $config_var from secret file $secret_env_var" + + if grep -q "^$config_var=" "$temp_config"; then + # Update existing variable + sed -i "s|^$config_var=.*|$config_var=\"$secret_value\"|" "$temp_config" + else + # Add new variable + echo "$config_var=\"$secret_value\"" >> "$temp_config" + fi + else + log_message "WARN" "Secret file for $env_var is empty, skipping" + fi + fi + done + + # Copy the temp file back to the actual config + if ! cp "$temp_config" "$config_file"; then + log_message "ERROR" "Failed to update config file from temp file" + log_message "DEBUG" "Temp file: $(cat "$temp_config")" + return 1 + fi + + log_message "SUCCESS" "Config file updated with environment variables" + rm -f "$temp_config" + return 0 +} + +# Initial system information +log_message "INFO" "Starting Docker container for Export_Trakt_4_Letterboxd" +show_version +log_message "DEBUG" "Container environment:" +log_message "DEBUG" "User: $(id)" +log_message "DEBUG" "Working directory: $(pwd)" +log_message "DEBUG" "Environment variables:" +log_message "DEBUG" "- TZ: ${TZ:-Not set}" +log_message "DEBUG" "- CRON_SCHEDULE: ${CRON_SCHEDULE:-Not set}" +log_message "DEBUG" "- EXPORT_OPTION: ${EXPORT_OPTION:-Not set}" +log_message "DEBUG" "- LIMIT_FILMS: ${LIMIT_FILMS:-Not set}" + +# Create config directory if it doesn't exist +mkdir -p /app/config + +# Create example config file if it doesn't exist +if [ ! -f /app/config/.config.cfg.example ]; then + echo "Creating example config file in config directory..." 
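+    # This template also serves as the reference that verify_config_variables (defined below)
+    # reads when backfilling missing keys into an existing .config.cfg.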
+ cat > /app/config/.config.cfg.example << 'EOF' +############################################################################ +# TRAKT API CONFIGURATION +############################################################################ +# API credentials - Get these from https://trakt.tv/oauth/applications +API_KEY="YOUR_API_KEY_HERE" +API_SECRET="YOUR_API_SECRET_HERE" +API_URL="https://api.trakt.tv" + +# Authentication tokens - Generated by setup_trakt.sh +ACCESS_TOKEN="" +REFRESH_TOKEN="" +REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" + +# User information +USERNAME="YOUR_TRAKT_USERNAME" + +############################################################################ +# DIRECTORY PATHS +############################################################################ +# Backup and output directories +BACKUP_DIR="./backup" +DOSLOG="./logs" +DOSCOPY="./copy" +CONFIG_DIR="./config" + +# Date format for filenames +DATE=$(date +%Y%m%d_%H%M) +LOG="${DOSLOG}/Export_Trakt_4_Letterboxd_$(date '+%Y-%m-%d_%H-%M-%S').log" + +############################################################################ +# DISPLAY SETTINGS +############################################################################ +# Terminal colors +RED='\033[0;31m' # Color code for error messages +GREEN='\033[0;32m' # Color code for success messages +NC='\033[0m' # No Color +BOLD='\033[1m' # Code for bold text +SAISPAS='\e[1;33;41m' # Background color code: 1;33 for yellow, 44 for red +EOF + echo "Example config file created at /app/config/.config.cfg.example" +fi + +# Check if config file exists +if [ ! -f /app/config/.config.cfg ]; then + echo "Config file not found. Creating from template..." + cp /app/config/.config.cfg.example /app/config/.config.cfg + echo "Please edit /app/config/.config.cfg with your Trakt API credentials." +fi + +# Function to verify and add missing variables to the config file +verify_config_variables() { + local config_file="/app/config/.config.cfg" + local example_file="/app/config/.config.cfg.example" + local missing_vars=0 + local added_vars=0 + + log_message "INFO" "Verifying configuration variables..." + + # Create a temporary file to store the list of required variables + cat > /tmp/required_vars.txt << 'EOF' +API_KEY +API_SECRET +API_URL +ACCESS_TOKEN +REFRESH_TOKEN +REDIRECT_URI +USERNAME +BACKUP_DIR +DOSLOG +DOSCOPY +CONFIG_DIR +DATE +LOG +RED +GREEN +NC +BOLD +SAISPAS +EOF + + # Check each required variable + while IFS= read -r var; do + if ! grep -q "^${var}=" "$config_file"; then + log_message "WARN" "Missing variable: ${var}" + missing_vars=$((missing_vars + 1)) + + # Extract the variable definition from the example file + var_line=$(grep "^${var}=" "$example_file") + + if [ -n "$var_line" ]; then + # Add the variable to the config file + echo "$var_line" >> "$config_file" + added_vars=$((added_vars + 1)) + log_message "INFO" "Added ${var} to config file" + else + log_message "ERROR" "Could not find ${var} in example file" + fi + fi + done < /tmp/required_vars.txt + + # Clean up temporary files + rm -f /tmp/required_vars.txt + + # Report results + if [ $missing_vars -eq 0 ]; then + log_message "SUCCESS" "All required variables are present in the config file." + else + if [ $added_vars -eq $missing_vars ]; then + log_message "SUCCESS" "Added $added_vars missing variables to the config file." + else + log_message "WARN" "Found $missing_vars missing variables, but could only add $added_vars." + log_message "WARN" "Please check your config file manually." 
+ fi + fi +} + +# Remove any existing symlink or config file in the root directory +if [ -L /app/.config.cfg ] || [ -f /app/.config.cfg ]; then + log_message "INFO" "Removing old config file from root directory" + rm -f /app/.config.cfg + log_message "SUCCESS" "Removed old config file from root directory" +fi + +# Create necessary directories with proper permissions +log_message "INFO" "Creating necessary directories with proper permissions" +mkdir -p /app/logs /app/copy /app/backup /app/TEMP +chmod -R 777 /app/logs /app/copy /app/backup /app/TEMP /app/config +log_message "SUCCESS" "Directories created with permissions 777" + +# Debug directory information +debug_file_info "/app/logs" "Logs directory" +debug_file_info "/app/copy" "Copy directory" +debug_file_info "/app/backup" "Backup directory" +debug_file_info "/app/TEMP" "Temp directory" +debug_file_info "/app/config" "Config directory" +debug_file_info "/app/lib" "Library directory" + +# Verify config file variables +verify_config_variables + +# Check if Trakt API credentials are set +if grep -q '^API_KEY="YOUR_API_KEY_HERE"' /app/config/.config.cfg || \ + grep -q '^API_SECRET="YOUR_API_SECRET_HERE"' /app/config/.config.cfg; then + log_message "WARN" "API credentials not configured in .config.cfg" + log_message "INFO" "Please edit /app/config/.config.cfg with your Trakt API credentials" + log_message "INFO" "You can get API credentials at https://trakt.tv/oauth/applications" +fi + +# Main entry point - Add this at the end of the file +if [ "$1" = "healthcheck" ]; then + # Just run the health check and exit + source /app/lib/health_check.sh + run_health_checks + exit $? +elif [ "$1" = "setup" ]; then + # Run the setup script + exec /app/setup_trakt.sh +else + # Clean temporary directories before starting + clean_temp_directories + + # Sync environment variables to config file + sync_env_to_config + + # Start health check server in background + start_health_server + + # Run the export script based on cron schedule or directly + if [ -n "$CRON_SCHEDULE" ]; then + log_message "INFO" "Setting up scheduled task with cron schedule: $CRON_SCHEDULE" + + # Function to calculate seconds until next cron execution + calculate_next_run() { + local cron_schedule="$1" + local cron_min cron_hour cron_day cron_month cron_dow + + # Parse the cron schedule + read -r cron_min cron_hour cron_day cron_month cron_dow <<< "$cron_schedule" + + # Redirect debug logs to a file instead of stdout to avoid interference with return value + log_message "DEBUG" "Parsed cron: minute=$cron_min, hour=$cron_hour, day=$cron_day, month=$cron_month, dow=$cron_dow" > /app/logs/cron_parser.log 2>&1 + + # Special case for * * * * * (every minute) + if [ "$cron_min" = "*" ] && [ "$cron_hour" = "*" ] && [ "$cron_day" = "*" ] && [ "$cron_month" = "*" ]; then + log_message "INFO" "Schedule format detected: every minute" >> /app/logs/cron_parser.log 2>&1 + echo "60" + return + fi + + # Special case for */X format (every X minutes) + if [[ "$cron_min" =~ ^\*/([0-9]+)$ ]]; then + local minutes="${BASH_REMATCH[1]}" + log_message "INFO" "Schedule format detected: every $minutes minutes" >> /app/logs/cron_parser.log 2>&1 + echo "$((minutes * 60))" + return + fi + + # Handle daily schedule at specific time (e.g., "0 3 * * *" = every day at 3am) + if [[ "$cron_min" =~ ^[0-9]+$ ]] && [[ "$cron_hour" =~ ^[0-9]+$ ]] && [ "$cron_day" = "*" ] && [ "$cron_month" = "*" ]; then + log_message "INFO" "Schedule format detected: daily at $cron_hour:$cron_min" >> /app/logs/cron_parser.log 2>&1 + + # Get 
current hour and minute
+            # Force base 10 so zero-padded values like "09" are not parsed as octal
+            local current_hour=$((10#$(date +%H)))
+            local current_min=$((10#$(date +%M)))
+
+            # Convert everything to minutes since midnight
+            local schedule_minutes=$((cron_hour * 60 + cron_min))
+            local current_minutes=$((current_hour * 60 + current_min))
+
+            local wait_minutes=0
+
+            # If scheduled time is in the future today
+            if [ $schedule_minutes -gt $current_minutes ]; then
+                wait_minutes=$((schedule_minutes - current_minutes))
+            else
+                # Schedule is for tomorrow
+                wait_minutes=$((schedule_minutes + 1440 - current_minutes))
+            fi
+
+            log_message "INFO" "Next run in $wait_minutes minutes" >> /app/logs/cron_parser.log 2>&1
+            echo "$((wait_minutes * 60))"
+            return
+        fi
+
+        # For unsupported formats, default to hourly
+        log_message "WARN" "Complex cron format not fully supported, using hourly schedule" >> /app/logs/cron_parser.log 2>&1
+        echo "3600"
+    }
+
+    # Run in a loop with the cron schedule
+    while true; do
+        # Get current timestamp for log filename
+        TIMESTAMP=$(date '+%Y-%m-%d_%H-%M-%S')
+        LOG_FILE="/app/logs/cron_export_${TIMESTAMP}.log"
+
+        # Create a more visual log header
+        log_message "INFO" "🔄 ==================== SCHEDULED EXPORT START ===================="
+        log_message "INFO" "📅 Date: $(date '+%Y-%m-%d') ⏰ Time: $(date '+%H:%M:%S')"
+        log_message "INFO" "📋 Export option: ${EXPORT_OPTION}"
+        log_message "INFO" "📝 Log file: ${LOG_FILE}"
+        log_message "INFO" "🔄 =========================================================="
+
+        # Run the export script and log output
+        log_message "INFO" "🚀 Launching export script..."
+        START_TIME=$(date +%s)
+
+        # Pass the LIMIT_FILMS parameter if set
+        if [ -n "$LIMIT_FILMS" ]; then
+            log_message "INFO" "🎯 Limiting to ${LIMIT_FILMS} films"
+            export LIMIT_FILMS
+        fi
+
+        # '|| EXIT_CODE=$?' records a failure without letting 'set -e' abort the whole container
+        EXIT_CODE=0
+        /app/Export_Trakt_4_Letterboxd.sh $EXPORT_OPTION > "$LOG_FILE" 2>&1 || EXIT_CODE=$?
+        END_TIME=$(date +%s)
+        DURATION=$((END_TIME - START_TIME))
+
+        # Count films in generated CSV files
+        count_films_in_csv() {
+            # Check each type of CSV file that might be generated
+            local total_films=0
+            local csv_found=false
+            local report=""
+            local raw_count=0
+
+            # Define all possible CSV files to check
+            local csv_files=(
+                "/app/copy/letterboxd_import.csv"
+                "/app/copy/trakt_movies_history.csv"
+                "/app/copy/trakt_movies_watched.csv"
+                "/app/copy/trakt_movies_watchlist.csv"
+                "/app/copy/trakt_ratings.csv"
+            )
+
+            # Check for raw output file
+            if [ -f "/app/TEMP/watched_raw_output.csv" ]; then
+                raw_count=$(($(wc -l < "/app/TEMP/watched_raw_output.csv") - 1))
+                log_message "INFO" "📊 Raw Trakt data: ${raw_count} films"
+            fi
+
+            # Check other temp files to diagnose potential issues
+            local temp_files=(
+                "/app/TEMP/watched_filtered_output.csv"
+                "/app/TEMP/ratings_output.csv"
+                "/app/TEMP/watchlist_output.csv"
+                "/app/TEMP/shows_watched.csv"
+            )
+
+            for temp_file in "${temp_files[@]}"; do
+                if [ -f "$temp_file" ]; then
+                    local temp_count=$(($(wc -l < "$temp_file") - 1))
+                    local temp_name=$(basename "$temp_file")
+                    log_message "INFO" "📁 Temp file ${temp_name}: ${temp_count} entries"
+                fi
+            done
+
+            # Check each file and count lines (minus header)
+            for csv_file in "${csv_files[@]}"; do
+                if [ -f "$csv_file" ]; then
+                    csv_found=true
+                    # Get line count minus header line
+                    local count=$(($(wc -l < "$csv_file") - 1))
+                    local filename=$(basename "$csv_file")
+
+                    # Add to total and report
+                    total_films=$((total_films + count))
+
+                    # Check if this is letterboxd_import and raw_count exists
+                    if [[ "$filename" == "letterboxd_import.csv" ]] && [[ $raw_count -gt 0 ]]; then
+                        local diff=$((raw_count - count))
+                        if [ $diff -gt 0 ]; then
+                            report="${report}📊 ${filename}: ${count} films (${diff} films missing from raw data)\n"
+                        else
+                            report="${report}📊 ${filename}: ${count} films\n"
+                        fi
+                    else
+                        report="${report}📊 ${filename}: ${count} films\n"
+                    fi
+                fi
+            done
+
+            if [ "$csv_found" = true ]; then
+                log_message "INFO" "📋 Export summary:"
+                # Print report without trailing newline
+                echo -e "$report" | sed '/^$/d' | while IFS= read -r line; do
+                    log_message "INFO" "$line"
+                done
+                log_message "INFO" "📊 Total: $total_films films in export files"
+            else
+                log_message "WARN" "⚠️ No CSV files found to count films"
+            fi
+        }
+
+        # Log completion with duration and status
+        if [ $EXIT_CODE -eq 0 ]; then
+            log_message "SUCCESS" "✅ Export completed successfully in ${DURATION}s"
+            # Count and log film numbers
+            count_films_in_csv
+        else
+            log_message "ERROR" "❌ Export failed with exit code ${EXIT_CODE} after ${DURATION}s"
+        fi
+
+        # Calculate time until next run - capture the output in a variable
+        SLEEP_SECONDS=$(calculate_next_run "$CRON_SCHEDULE")
+
+        # Work out the date/time of the next run in a way that is compatible with Alpine Linux (BusyBox) date
+        NEXT_RUN_TIME=$(date -d "@$(($(date +%s) + SLEEP_SECONDS))" '+%Y-%m-%d %H:%M:%S' 2>/dev/null || date "+%Y-%m-%d %H:%M:%S" -d "@$(($(date +%s) + SLEEP_SECONDS))" 2>/dev/null || echo "in ${SLEEP_SECONDS}s")
+
+        # Log details about timing with visual elements
+        if [ "$SLEEP_SECONDS" -ge 3600 ]; then
+            HOURS=$((SLEEP_SECONDS / 3600))
+            MINUTES=$(((SLEEP_SECONDS % 3600) / 60))
+            log_message "INFO" "⏱️ Next run in ${HOURS}h ${MINUTES}m (at ${NEXT_RUN_TIME})"
+        else
+            MINUTES=$((SLEEP_SECONDS / 60))
+            log_message "INFO" "⏱️ Next run in ${MINUTES} minutes (at ${NEXT_RUN_TIME})"
+        fi
+        log_message "INFO" "🔄 ==================== SCHEDULED EXPORT END 
=====================" + + # Wait for the next interval + sleep $SLEEP_SECONDS + done + else + # Run script once + log_message "INFO" "Running export script once with option: $EXPORT_OPTION" + exec /app/Export_Trakt_4_Letterboxd.sh "$EXPORT_OPTION" + fi +fi \ No newline at end of file diff --git a/docker/README.md b/docker/README.md deleted file mode 100644 index de3507c..0000000 --- a/docker/README.md +++ /dev/null @@ -1,335 +0,0 @@ -# Docker Usage Guide - Export Trakt 4 Letterboxd - -This guide explains how to use the Docker Compose services with the new `--run` and `--schedule` functionality. - -## Overview - -The Docker Compose configuration now supports three execution modes: - -1. **Immediate Execution (`--run`)**: Execute once and exit -2. **Scheduled Execution (`--schedule`)**: Run on a cron schedule -3. **Legacy Mode**: Traditional command-based approach (for backward compatibility) - -## Quick Start - -### Test Your Configuration - -```bash -# Quick test to verify your configuration works -docker compose --profile run-watched up -``` - -### Production Scheduler - -```bash -# Start a production scheduler (every 6 hours) -docker compose --profile schedule-6h up -d -``` - -### Check Status - -```bash -# View scheduler logs -docker compose --profile schedule-6h logs -f -``` - -## Immediate Execution Services (`--run`) - -These services execute once and then exit. Perfect for: - -- Testing configurations -- Manual exports -- CI/CD integration - -### Available Services - -| Service | Profile | Description | Command | -| ----------------------------- | ---------------- | ----------------------------- | ----------------------------------------- | -| `export-trakt-run-watched` | `run-watched` | Export watched movies only | `--run --export watched --mode normal` | -| `export-trakt-run-all` | `run-all` | Export all data (recommended) | `--run --export all --mode complete` | -| `export-trakt-run-collection` | `run-collection` | Export collection only | `--run --export collection --mode normal` | -| `export-trakt-run-ratings` | `run-ratings` | Export ratings only | `--run --export ratings --mode complete` | -| `export-trakt-run-watchlist` | `run-watchlist` | Export watchlist only | `--run --export watchlist --mode normal` | -| `export-trakt-run-shows` | `run-shows` | Export shows only | `--run --export shows --mode complete` | - -### Usage Examples - -```bash -# Export all data immediately -docker compose --profile run-all up - -# Export only watched movies -docker compose --profile run-watched up - -# Export specific data types -docker compose --profile run-collection up -docker compose --profile run-ratings up -docker compose --profile run-watchlist up -docker compose --profile run-shows up - -# Run multiple exports sequentially -docker compose --profile run-watched up && \ -docker compose --profile run-ratings up -``` - -## Scheduled Execution Services (`--schedule`) - -These services run continuously according to a cron schedule. 
Perfect for: - -- Production automation -- Regular backups -- Unattended operation - -### Available Services - -| Service | Profile | Schedule | Description | -| ------------------------------ | ----------------- | ------------------ | ---------------------------- | -| `export-trakt-schedule-6h` | `schedule-6h` | Every 6 hours | Recommended for production | -| `export-trakt-schedule-daily` | `schedule-daily` | Daily at 2:30 AM | Daily comprehensive export | -| `export-trakt-schedule-weekly` | `schedule-weekly` | Sundays at 3:00 AM | Weekly backup | -| `export-trakt-schedule-15min` | `schedule-15min` | Every 15 minutes | High-frequency testing | -| `export-trakt-schedule-custom` | `schedule-custom` | Configurable | Custom schedule via env vars | - -### Usage Examples - -```bash -# Production scheduler (every 6 hours) -docker compose --profile schedule-6h up -d - -# Daily backup at 2:30 AM -docker compose --profile schedule-daily up -d - -# Weekly comprehensive backup -docker compose --profile schedule-weekly up -d - -# High-frequency testing (every 15 minutes) -docker compose --profile schedule-15min up -d - -# Custom schedule using environment variables -SCHEDULE="0 */4 * * *" EXPORT_TYPE="watched" EXPORT_MODE="normal" \ -docker compose --profile schedule-custom up -d -``` - -## Custom Configuration - -### Environment Variables for Custom Scheduler - -The `schedule-custom` profile accepts these environment variables: - -| Variable | Default | Description | Example | -| ------------- | ------------- | ------------------------ | ---------------------------------------- | -| `SCHEDULE` | `0 */6 * * *` | Cron schedule expression | `"30 2 * * *"` | -| `EXPORT_TYPE` | `all` | Type of export | `watched`, `collection`, `ratings`, etc. | -| `EXPORT_MODE` | `complete` | Export mode | `normal`, `initial`, `complete` | - -### Custom Schedule Examples - -```bash -# Export watched movies every 4 hours -SCHEDULE="0 */4 * * *" EXPORT_TYPE="watched" EXPORT_MODE="normal" \ -docker compose --profile schedule-custom up -d - -# Export all data every Monday at 9 AM -SCHEDULE="0 9 * * 1" EXPORT_TYPE="all" EXPORT_MODE="complete" \ -docker compose --profile schedule-custom up -d - -# Export ratings daily at noon -SCHEDULE="0 12 * * *" EXPORT_TYPE="ratings" EXPORT_MODE="complete" \ -docker compose --profile schedule-custom up -d -``` - -## Legacy Services (Backward Compatibility) - -These services use the traditional command-based approach: - -| Service | Profile | Description | -| ------------------------ | ------------------------------- | ------------------------ | -| `export-trakt` | `default`, `legacy` | Normal export (legacy) | -| `export-trakt-complete` | `complete`, `legacy` | Complete export (legacy) | -| `export-trakt-initial` | `initial`, `legacy` | Initial export (legacy) | -| `export-trakt-scheduled` | `scheduled`, `legacy-scheduled` | Legacy cron system | - -## Management Commands - -### Start Services - -```bash -# Start immediately and view logs -docker compose --profile run-all up - -# Start in background (detached) -docker compose --profile schedule-6h up -d -``` - -### Monitor Services - -```bash -# View logs (follow mode) -docker compose --profile schedule-6h logs -f - -# View logs for specific time period -docker compose --profile schedule-6h logs --since="2h" - -# Check service status -docker compose --profile schedule-6h ps -``` - -### Stop Services - -```bash -# Stop specific service -docker compose --profile schedule-6h down - -# Stop all services -docker compose down - -# Stop and remove 
volumes -docker compose down -v -``` - -### Restart Services - -```bash -# Restart scheduler -docker compose --profile schedule-6h restart - -# Restart with new configuration -docker compose --profile schedule-6h down -docker compose --profile schedule-6h up -d -``` - -## Volume Management - -The Docker Compose setup uses the following volumes: - -| Volume | Purpose | Local Path | -| ------------- | ------------------- | ----------- | -| Configuration | TOML config files | `./config` | -| Logs | Application logs | `./logs` | -| Exports | Generated CSV files | `./exports` | - -### Backup Your Data - -```bash -# Create backup of configuration and exports -tar -czf trakt-backup-$(date +%Y%m%d).tar.gz config/ exports/ logs/ - -# Restore from backup -tar -xzf trakt-backup-20240120.tar.gz -``` - -## Troubleshooting - -### Common Issues - -1. **Service won't start** - - ```bash - # Check logs for errors - docker compose --profile run-watched logs - - # Validate configuration first - docker compose --profile validate up - ``` - -2. **Invalid cron schedule** - - ```bash - # Test with a simple schedule first - SCHEDULE="*/5 * * * *" docker compose --profile schedule-custom up - ``` - -3. **Permission issues** - ```bash - # Fix volume permissions - sudo chown -R 1000:1000 config/ logs/ exports/ - ``` - -### Debug Commands - -```bash -# Test immediate execution -docker compose --profile run-watched up - -# Check if config is valid -docker compose --profile validate up - -# View detailed logs -docker compose --profile schedule-6h logs --timestamps - -# Connect to running container -docker compose --profile schedule-6h exec export-trakt-schedule-6h sh -``` - -## Best Practices - -### 1. Start with Testing - -```bash -# Always test your configuration first -docker compose --profile run-watched up -``` - -### 2. Use Appropriate Schedules - -- **Production**: Every 6-12 hours (`schedule-6h`) -- **Development**: Every 15-30 minutes (`schedule-15min`) -- **Backup**: Weekly (`schedule-weekly`) - -### 3. Monitor Resource Usage - -```bash -# Check container resource usage -docker stats - -# View disk usage -docker system df -``` - -### 4. Regular Maintenance - -```bash -# Clean up old containers and images -docker system prune - -# Update to latest image -docker compose pull -docker compose --profile schedule-6h up -d -``` - -### 5. Backup Configuration - -Always backup your `config/` directory before making changes. - -## Production Deployment - -### Recommended Setup - -```bash -# 1. Test configuration -docker compose --profile run-watched up - -# 2. Start production scheduler -docker compose --profile schedule-6h up -d - -# 3. Set up log rotation (optional) -# Add logrotate configuration for ./logs/*.log - -# 4. Monitor with external tools -# Set up monitoring for container health -``` - -### Health Checks - -```bash -# Check if scheduler is running -docker compose --profile schedule-6h ps - -# View recent logs -docker compose --profile schedule-6h logs --tail=50 - -# Check export files are being created -ls -la exports/ -``` - -This Docker setup provides a flexible and robust way to run Export Trakt 4 Letterboxd with the new scheduling capabilities! 
diff --git a/docker/debug-logs.sh b/docker/debug-logs.sh deleted file mode 100755 index 7dee5ce..0000000 --- a/docker/debug-logs.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/bash - -# Debug script for monitoring Docker container logs in real-time -# Export Trakt 4 Letterboxd - -echo "=== Docker Logs Debug Script ===" -echo "" - -# Function to display usage -usage() { - echo "Usage: $0 [service-profile] [options]" - echo "" - echo "Service profiles:" - echo " schedule-15min - Every 15 minutes (testing)" - echo " schedule-6h - Every 6 hours (production)" - echo " schedule-daily - Daily at 2:30 AM" - echo " schedule-weekly - Weekly on Sundays" - echo "" - echo "Options:" - echo " --follow, -f - Follow logs in real-time (default)" - echo " --tail N - Show last N lines (default: 50)" - echo " --since TIME - Show logs since TIME (e.g., '10m', '1h', '2023-01-01T10:00:00')" - echo " --timestamps - Show timestamps" - echo " --help, -h - Show this help" - echo "" - echo "Examples:" - echo " $0 schedule-15min" - echo " $0 schedule-6h --tail 100" - echo " $0 schedule-daily --since 1h" - echo "" -} - -# Default values -PROFILE="schedule-15min" -FOLLOW=true -TAIL=50 -SINCE="" -TIMESTAMPS=false - -# Parse arguments -while [[ $# -gt 0 ]]; do - case $1 in - schedule-15min|schedule-6h|schedule-daily|schedule-weekly) - PROFILE="$1" - shift - ;; - --follow|-f) - FOLLOW=true - shift - ;; - --tail) - TAIL="$2" - shift 2 - ;; - --since) - SINCE="$2" - shift 2 - ;; - --timestamps) - TIMESTAMPS=true - shift - ;; - --help|-h) - usage - exit 0 - ;; - *) - echo "Unknown option: $1" - usage - exit 1 - ;; - esac -done - -echo "Monitoring logs for profile: $PROFILE" -echo "" - -# Check if service is running -if ! docker compose --profile "$PROFILE" ps | grep -q "Up"; then - echo "⚠️ Service with profile '$PROFILE' is not running." - echo "" - echo "To start the service:" - echo " docker compose --profile $PROFILE up -d" - echo "" - echo "Current running services:" - docker compose ps --filter "status=running" - exit 1 -fi - -# Build docker logs command -LOG_CMD="docker compose --profile $PROFILE logs" - -if [ "$FOLLOW" = true ]; then - LOG_CMD="$LOG_CMD --follow" -fi - -if [ -n "$TAIL" ]; then - LOG_CMD="$LOG_CMD --tail=$TAIL" -fi - -if [ -n "$SINCE" ]; then - LOG_CMD="$LOG_CMD --since=$SINCE" -fi - -if [ "$TIMESTAMPS" = true ]; then - LOG_CMD="$LOG_CMD --timestamps" -fi - -echo "Command: $LOG_CMD" -echo "" -echo "🔍 Monitoring logs... (Press Ctrl+C to stop)" -echo "============================================" -echo "" - -# Execute the command -eval $LOG_CMD \ No newline at end of file diff --git a/docker/test-docker.sh b/docker/test-docker.sh deleted file mode 100755 index bdef544..0000000 --- a/docker/test-docker.sh +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/bash - -# Test script for Docker Compose with new --run and --schedule features -# Export Trakt 4 Letterboxd - -echo "=== Docker Compose Test Script - Export Trakt 4 Letterboxd ===" -echo "" - -# Check if Docker and Docker Compose are available -if ! command -v docker &> /dev/null; then - echo "❌ Docker is not installed or not in PATH" - exit 1 -fi - -if ! 
command -v docker compose &> /dev/null; then - echo "❌ Docker Compose is not available" - exit 1 -fi - -echo "✅ Docker and Docker Compose are available" -echo "" - -# Test 1: Validate Docker Compose configuration -echo "=== Test 1: Validating Docker Compose configuration ===" -if docker compose config --quiet; then - echo "✅ Docker Compose configuration is valid" -else - echo "❌ Docker Compose configuration is invalid" - exit 1 -fi -echo "" - -# Test 2: Show available services -echo "=== Test 2: Available services ===" -echo "Immediate execution services (--run):" -echo " - run-watched: Export watched movies only" -echo " - run-all: Export all data (recommended for testing)" -echo " - run-collection: Export collection only" -echo " - run-ratings: Export ratings only" -echo " - run-watchlist: Export watchlist only" -echo " - run-shows: Export shows only" -echo "" -echo "Scheduled services (--schedule):" -echo " - schedule-6h: Every 6 hours (production)" -echo " - schedule-daily: Daily at 2:30 AM" -echo " - schedule-weekly: Weekly on Sundays at 3:00 AM" -echo " - schedule-15min: Every 15 minutes (testing)" -echo " - schedule-custom: Custom schedule via environment variables" -echo "" - -# Test 3: List all available profiles -echo "=== Test 3: Available Docker Compose profiles ===" -docker compose config --profiles 2>/dev/null || echo "Profile listing not supported in this Docker Compose version" -echo "" - -# Test 4: Test configuration validation (if config exists) -echo "=== Test 4: Testing configuration validation ===" -if [ -f "config/config.toml" ]; then - echo "Configuration file found. Testing validation..." - if timeout 30s docker compose --profile validate up --remove-orphans 2>/dev/null; then - echo "✅ Configuration validation passed" - else - echo "⚠️ Configuration validation completed (may need setup)" - fi -else - echo "⚠️ No configuration file found at config/config.toml" - echo " Run: docker compose --profile setup up" -fi -echo "" - -# Test 5: Show example commands -echo "=== Test 5: Example commands ===" -echo "" -echo "🚀 Quick start commands:" -echo "" -echo "1. Setup (first time):" -echo " docker compose --profile setup up" -echo "" -echo "2. Test your configuration:" -echo " docker compose --profile run-watched up" -echo "" -echo "3. Export all data once:" -echo " docker compose --profile run-all up" -echo "" -echo "4. Start production scheduler (every 6 hours):" -echo " docker compose --profile schedule-6h up -d" -echo "" -echo "5. View scheduler logs:" -echo " docker compose --profile schedule-6h logs -f" -echo "" -echo "6. 
Stop scheduler:" -echo " docker compose --profile schedule-6h down" -echo "" -echo "📋 Specific export types:" -echo " docker compose --profile run-collection up # Collection only" -echo " docker compose --profile run-ratings up # Ratings only" -echo " docker compose --profile run-watchlist up # Watchlist only" -echo " docker compose --profile run-shows up # Shows only" -echo "" -echo "⏰ Different schedules:" -echo " docker compose --profile schedule-daily up -d # Daily at 2:30 AM" -echo " docker compose --profile schedule-weekly up -d # Weekly backup" -echo " docker compose --profile schedule-15min up -d # Every 15 min (testing)" -echo "" -echo "🎛️ Custom schedule:" -echo " SCHEDULE=\"0 */4 * * *\" EXPORT_TYPE=\"watched\" EXPORT_MODE=\"normal\" \\" -echo " docker compose --profile schedule-custom up -d" -echo "" - -# Test 6: Show Docker information -echo "=== Test 6: Docker environment information ===" -echo "Docker version:" -docker version --format "{{.Client.Version}}" 2>/dev/null || echo "Could not get Docker version" -echo "" -echo "Docker Compose version:" -docker compose version --short 2>/dev/null || echo "Could not get Docker Compose version" -echo "" - -echo "=== Docker Compose Test Completed! ===" -echo "" -echo "✅ Ready to use the new Docker Compose features!" -echo "" -echo "Next steps:" -echo "1. If you haven't already, run setup: docker compose --profile setup up" -echo "2. Test your config: docker compose --profile run-watched up" -echo "3. For production: docker compose --profile schedule-6h up -d" -echo "" -echo "For detailed usage, see: docker/README.md" \ No newline at end of file diff --git a/docs/AUTO_TAGGING.md b/docs/AUTO_TAGGING.md new file mode 100644 index 0000000..5ed96ee --- /dev/null +++ b/docs/AUTO_TAGGING.md @@ -0,0 +1,68 @@ +# Automatic Version Tagging + +This project uses GitHub Actions to automatically create version tags and releases when Pull Requests are merged into the main branch. + +## How It Works + +The automatic tagging workflow is defined in `.github/workflows/auto-tag.yml` and performs the following actions: + +1. **Trigger**: The workflow is triggered when a Pull Request is closed (merged) into the main branch. + +2. **Version Calculation**: + + - The workflow finds the latest version tag (format: `vX.Y.Z`) + - It increments the patch version by 1 (e.g., `v1.0.0` → `v1.0.1`) + - If no previous tag exists, it starts with `v1.0.0` + +3. **Tag Creation**: + + - Creates an annotated Git tag with the new version + - The tag message includes the PR number and title + - Pushes the tag to the repository + +4. **Release Creation**: + - Automatically creates a GitHub Release using the new tag + - The release includes: + - The PR title and number + - The PR description as the release notes + - Information about who merged the PR + +## Benefits + +This automatic tagging system provides several benefits: + +- **Consistent Versioning**: Ensures that each merged PR results in a properly versioned release +- **Traceability**: Links releases directly to the Pull Requests that created them +- **Documentation**: Automatically generates release notes from PR descriptions +- **CI/CD Integration**: The new tags can trigger other workflows, such as Docker image builds + +## Best Practices for Pull Requests + +To make the most of this automatic tagging system: + +1. **Descriptive PR Titles**: Use clear, concise titles that describe the changes +2. **Detailed PR Descriptions**: Include comprehensive descriptions that can serve as good release notes +3. 
**One Feature Per PR**: Keep PRs focused on a single feature or fix for cleaner release notes + +## Manual Version Bumps + +For major or minor version bumps (instead of patch): + +1. Create a tag manually before merging the PR: + + ```bash + git tag -a v2.0.0 -m "Major version bump for XYZ feature" + git push origin v2.0.0 + ``` + +2. The auto-tagging workflow will detect this as the latest tag and increment from there. + +## Troubleshooting + +If the automatic tagging doesn't work as expected: + +1. Check the GitHub Actions logs for any errors +2. Ensure the PR was properly merged into the main branch +3. Verify that the repository has the correct permissions set for GitHub Actions + +For more information on GitHub Actions, see [GitHub Actions Documentation](https://docs.github.com/en/actions). diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md new file mode 100644 index 0000000..4806fe7 --- /dev/null +++ b/docs/CONFIGURATION.md @@ -0,0 +1,132 @@ +# Configuration and Basic Usage + +This document provides detailed information about configuring and using the Export Trakt 4 Letterboxd application. + +## Prerequisites + +- A Trakt.tv account +- A Trakt.tv application (Client ID and Client Secret) +- jq (for JSON processing) +- curl (for API requests) + +## Creating a Trakt.tv Application + +1. Log in to your Trakt.tv account +2. Go to https://trakt.tv/oauth/applications +3. Click on "New Application" +4. Fill in the information: + - Name: Export Trakt 4 Letterboxd + - Redirect URL: urn:ietf:wg:oauth:2.0:oob + - Description: (optional) +5. Save the application +6. Note your Client ID and Client Secret + +## Setting Up the Configuration File + +Copy the example configuration file to create your own: + +```bash +cp .config.cfg.example .config.cfg +``` + +You can edit the configuration file manually if you prefer, but it's recommended to use the setup script in the next step. + +## Authentication Configuration + +Run the configuration script: + +```bash +./setup_trakt.sh +``` + +This script will guide you through the following steps: + +1. Enter your Client ID and Client Secret +2. Enter your Trakt username +3. Obtain an authorization code +4. 
Generate access tokens + +## Basic Usage + +### Export Your Data + +```bash +./Export_Trakt_4_Letterboxd.sh [option] +``` + +Available options: + +- `normal` (default): Exports rated movies, rated episodes, movie and TV show history, and watchlist +- `initial`: Exports only rated and watched movies +- `complete`: Exports all available data + +### Result + +The script generates a `letterboxd_import.csv` file that you can import on Letterboxd at the following address: https://letterboxd.com/import/ + +## Configuration File Options + +The configuration file (`.config.cfg`) contains several options that you can customize: + +``` +# Trakt API credentials +CLIENT_ID="YOUR_TRAKT_CLIENT_ID" +CLIENT_SECRET="YOUR_TRAKT_CLIENT_SECRET" +TRAKT_USERNAME="YOUR_TRAKT_USERNAME" + +# TMDB API key (optional, for better movie matching) +TMDB_API_KEY="YOUR_TMDB_API_KEY" + +# Export options +EXPORT_RATINGS=true +EXPORT_HISTORY=true +EXPORT_WATCHLIST=true +EXPORT_EPISODES=true + +# Date format for export (YYYY-MM-DD) +DATE_FORMAT="%Y-%m-%d" + +# Minimum rating to export (1-10) +MIN_RATING=1 + +# Export path +EXPORT_PATH="/app/copy" + +# Backup options +BACKUP_ENABLED=true +BACKUP_DIR="/app/backup" + +# Log options +LOG_ENABLED=true +LOG_DIR="/app/logs" +LOG_LEVEL="info" + +# Advanced options +USE_TMDB_FOR_MATCHING=true +INCLUDE_YEAR_IN_TITLE=true +INCLUDE_LETTERBOXD_TAGS=true +``` + +## Troubleshooting + +### No Data is Exported + +If the script runs without error but no data is exported: + +1. Check that your Trakt.tv profile is public +2. Verify that you have correctly configured authentication +3. Run the configuration script again: `./setup_trakt.sh` + +### Authentication Errors + +If you encounter authentication errors: + +1. Check that your Client ID and Client Secret are correct +2. Get a new access token by running `./setup_trakt.sh` + +### File Permission Issues + +If you encounter file permission issues: + +1. Make sure the scripts are executable: `chmod +x *.sh` +2. Check that you have write permissions to the output directories diff --git a/docs/DOCKER_TESTING.md b/docs/DOCKER_TESTING.md new file mode 100644 index 0000000..78c1f29 --- /dev/null +++ b/docs/DOCKER_TESTING.md @@ -0,0 +1,101 @@ +# Docker Testing Workflow + +This document explains the automated testing workflow for Docker images in this project. The workflow is designed to verify that Docker images are functional and error-free before they are merged into the main or develop branches. + +## Overview + +The Docker testing workflow is defined in `.github/workflows/docker-test.yml` and is automatically triggered when a Pull Request is opened against the `main` or `develop` branches. This ensures that all Docker-related changes are thoroughly tested before being integrated. + +## What Gets Tested + +The workflow performs a comprehensive series of tests on the Docker image: + +1. **Image Building**: Verifies that the Docker image can be built successfully from the Dockerfile. + +2. **Structure Verification**: Checks that all required files, directories, and permissions are correctly set up: + + - Essential scripts (`Export_Trakt_4_Letterboxd.sh`, `setup_trakt.sh`, `docker-entrypoint.sh`) + - Required directories (`config`, `logs`, `copy`, `brain_ops`, `backup`, `TEMP`) + - Proper executable permissions + +3. **Dependency Verification**: Ensures all required tools are installed: + + - `jq` for JSON processing + - `curl` for API requests + - `sed` for text manipulation + +4. 
**Configuration Handling**: Tests the configuration file handling: + + - Presence of the example configuration file + - Ability to create a working configuration file + +5. **Cron Setup**: Verifies that the cron job setup functionality works correctly. + +6. **Docker Compose**: Tests the Docker Compose configuration: + + - Validates the `docker-compose.yml` file + - Ensures the container can be started and stopped with Docker Compose + +7. **Volume Mounting**: Tests that volumes can be correctly mounted and accessed: + - Creates test directories for all required volumes + - Mounts these volumes to a test container + - Verifies that the container can access the mounted volumes + +## Test Steps + +The workflow consists of the following steps: + +1. **Checkout Repository**: Fetches the code from the repository. + +2. **Set up Docker Buildx**: Configures Docker for building the image. + +3. **Build Docker Image**: Builds the Docker image with the tag `trakt-export:test`. + +4. **Verify Docker Image**: Runs a series of tests to verify the structure and dependencies of the image. + +5. **Test Docker Compose**: Validates and tests the Docker Compose configuration. + +6. **Test Docker Image with Mock Data**: Creates a test environment with mock data and verifies that the container can access and use this data. + +7. **Summary**: Provides a summary of the test results. + +## Test Output + +The workflow provides detailed output for each test step, including: + +- 🔍 Descriptive messages indicating what is being tested +- ✅ Success indicators for passed tests +- Detailed error messages for failed tests + +If any test fails, the workflow will exit with a non-zero status code, causing the GitHub Actions check to fail. This prevents merging Pull Requests with broken Docker functionality. + +## Running Tests Locally + +You can run similar tests locally to verify your Docker image before creating a Pull Request: + +```bash +# Build the Docker image +docker build -t trakt-export:test . + +# Verify the image structure +docker run --rm trakt-export:test ls -la /app + +# Test with Docker Compose +docker compose config +docker compose up -d +docker compose ps +docker compose down +``` + +## Troubleshooting + +If the Docker testing workflow fails, check the GitHub Actions logs for detailed error messages. Common issues include: + +1. **Missing Dependencies**: Ensure all required tools are installed in the Dockerfile. +2. **Permission Issues**: Check that scripts have the correct executable permissions. +3. **Configuration Problems**: Verify that the configuration file handling is working correctly. +4. **Volume Mounting Issues**: Ensure that the volume paths are correctly defined. + +## Extending the Tests + +To add more tests to the workflow, edit the `.github/workflows/docker-test.yml` file and add new steps or commands to the existing steps. Make sure to include descriptive messages and clear success/failure indicators. diff --git a/docs/DOCKER_USAGE.md b/docs/DOCKER_USAGE.md new file mode 100644 index 0000000..87f47c2 --- /dev/null +++ b/docs/DOCKER_USAGE.md @@ -0,0 +1,272 @@ +# Docker Usage Guide + +This document provides detailed information about using the Export Trakt 4 Letterboxd application with Docker. + +## Prerequisites + +- Docker installed on your system +- Docker Compose (optional, but recommended) +- For multi-architecture builds: Docker Buildx + +## Using Docker Compose (Recommended) + +### Quick Start + +1. 
Clone the repository: + + ```bash + git clone https://github.com/JohanDevl/Export_Trakt_4_Letterboxd.git + cd Export_Trakt_4_Letterboxd + ``` + +2. Build and start the container: + + ```bash + docker compose up + ``` + + This will build and run the container once, which will execute the script and exit. + +### Initial Setup + +For first-time setup, use the setup profile: + +```bash +docker compose --profile setup up +``` + +This will launch an interactive container to set up your Trakt authentication. + +### Running on a Schedule + +To run the exporter on a schedule: + +```bash +docker compose --profile scheduled up -d +``` + +This will start the container in the background and execute the export script according to the cron schedule defined in the docker-compose.yml file. + +## Docker Compose Profiles + +The docker-compose.yml file includes several profiles for different use cases: + +- Default (no profile): Run once and exit +- `setup`: Run the initial setup script +- `scheduled`: Run with a cron schedule +- `env-config`: Run with all configuration via environment variables + +Example usage: + +```bash +# Run the initial setup +docker compose --profile setup up + +# Run with a cron schedule +docker compose --profile scheduled up -d + +# Run with configuration via environment variables +docker compose --profile env-config up -d +``` + +## Building Multi-Architecture Images + +You can build multi-architecture Docker images using the provided build script: + +```bash +# Build and push multi-arch images (amd64, arm64, armv7) +./build-docker.sh --tag v1.0.0 + +# Build for local platform only +./build-docker.sh --local + +# Build but don't push +./build-docker.sh --no-push + +# See all options +./build-docker.sh --help +``` + +## Environment Variables + +| Variable | Description | Default | +| --------------- | ------------------------------------------------------- | -------- | +| `TZ` | Timezone | `UTC` | +| `CRON_SCHEDULE` | Cron schedule expression (empty to run once) | Empty | +| `EXPORT_OPTION` | Export option (`normal`, `initial`, `complete`) | `normal` | +| `API_KEY` | Trakt API key (from config file unless specified) | Empty | +| `API_SECRET` | Trakt API secret (from config file unless specified) | Empty | +| `ACCESS_TOKEN` | Trakt access token (from config file unless specified) | Empty | +| `REFRESH_TOKEN` | Trakt refresh token (from config file unless specified) | Empty | +| `USERNAME` | Trakt username (from config file unless specified) | Empty | + +## Docker Volumes + +The Docker container uses the following volumes to persist data: + +- `/app/config`: Contains the configuration file +- `/app/logs`: Contains log files +- `/app/copy`: Contains the exported Letterboxd CSV file +- `/app/backup`: Contains Trakt API backup data + +## Docker Healthchecks + +The Docker container includes built-in health checks that verify: + +- Required directories are present and writable +- Required files are present and readable +- Required commands are available +- Trakt API connectivity (if credentials are configured) + +You can check the container health status using: + +```bash +docker inspect --format "{{.State.Health.Status}}" trakt-export +``` + +## Using Docker Secrets + +For production deployments, you can use Docker secrets to manage sensitive configuration: + +```yaml +version: "3.8" + +services: + trakt-export: + image: johandevl/export-trakt-4-letterboxd:latest + volumes: + - ./config:/app/config + - ./logs:/app/logs + - ./copy:/app/copy + - ./backup:/app/backup + environment: + - 
TZ=Europe/Paris + - CRON_SCHEDULE=0 3 * * * + - EXPORT_OPTION=normal + secrets: + - trakt_api_key + - trakt_api_secret + - trakt_access_token + - trakt_refresh_token + +secrets: + trakt_api_key: + file: ./secrets/api_key.txt + trakt_api_secret: + file: ./secrets/api_secret.txt + trakt_access_token: + file: ./secrets/access_token.txt + trakt_refresh_token: + file: ./secrets/refresh_token.txt +``` + +## Advanced Docker Compose Examples + +### Production Deployment with Resource Limits + +```yaml +version: "3.8" + +services: + trakt-export: + image: johandevl/export-trakt-4-letterboxd:latest + container_name: trakt-export + volumes: + - ./config:/app/config + - ./logs:/app/logs + - ./copy:/app/copy + - ./backup:/app/backup + environment: + - TZ=Europe/Paris + - CRON_SCHEDULE=0 3 * * * + - EXPORT_OPTION=normal + restart: unless-stopped + healthcheck: + test: ["CMD", "/app/docker-entrypoint.sh", "healthcheck"] + interval: 1m + timeout: 10s + retries: 3 + deploy: + resources: + limits: + cpus: "0.5" + memory: 256M + reservations: + cpus: "0.1" + memory: 128M +``` + +### Integration with Traefik Reverse Proxy + +```yaml +version: "3.8" + +services: + trakt-export: + image: johandevl/export-trakt-4-letterboxd:latest + container_name: trakt-export + volumes: + - ./config:/app/config + - ./logs:/app/logs + - ./copy:/app/copy + - ./backup:/app/backup + environment: + - TZ=Europe/Paris + - CRON_SCHEDULE=0 3 * * * + - EXPORT_OPTION=normal + restart: unless-stopped + labels: + - "traefik.enable=true" + - "traefik.http.routers.trakt.rule=Host(`trakt.example.com`)" + - "traefik.http.routers.trakt.entrypoints=websecure" + - "traefik.http.routers.trakt.tls.certresolver=myresolver" + - "traefik.http.services.trakt.loadbalancer.server.port=8000" + networks: + - traefik + +networks: + traefik: + external: true +``` + +## Using Without Docker Compose + +If you prefer to use Docker directly without Docker Compose: + +```bash +docker run -it --name trakt-export \ + -v $(pwd)/config:/app/config \ + -v $(pwd)/logs:/app/logs \ + -v $(pwd)/copy:/app/copy \ + -v $(pwd)/backup:/app/backup \ + -e TZ=Europe/Paris \ + -e EXPORT_OPTION=complete \ + johandevl/export-trakt-4-letterboxd:latest +``` + +## Troubleshooting + +If you encounter issues with the Docker container: + +1. Check the container logs: + + ```bash + docker logs trakt-export + ``` + +2. Check the container health: + + ```bash + docker inspect --format "{{.State.Health.Status}}" trakt-export + ``` + +3. Enter the container to investigate: + + ```bash + docker exec -it trakt-export bash + ``` + +4. If the container is not starting, check the Docker daemon logs: + ```bash + docker system events + ``` diff --git a/docs/GITHUB_ACTIONS.md b/docs/GITHUB_ACTIONS.md new file mode 100644 index 0000000..c58d2f9 --- /dev/null +++ b/docs/GITHUB_ACTIONS.md @@ -0,0 +1,192 @@ +# GitHub Actions Workflow Documentation + +This document provides detailed information about the GitHub Actions workflow used in this project to build and publish Docker images to the GitHub Container Registry (ghcr.io). + +## Overview + +The workflow is defined in `.github/workflows/docker-publish.yml` and performs the following tasks: + +1. Builds a Docker image from the project +2. Pushes the image to GitHub Container Registry (ghcr.io) +3. Signs the image using Cosign for security +4. 
Automatically tags the latest build as "latest" for easy reference + +## Workflow Triggers + +The workflow is triggered by: + +- **Schedule**: Runs daily at 15:32 UTC (`cron: "32 15 * * *"`) +- **Push to main branch**: Any commits pushed to the `main` branch +- **Push to develop branch**: Any commits pushed to the `develop` branch +- **Version tags**: Any tags matching the pattern `v*.*.*` (e.g., `v1.0.0`) +- **Pull requests**: Any pull requests targeting the `main` branch + +## Workflow Steps + +The workflow consists of the following main steps: + +1. **Checkout repository**: Fetches the code from the repository +2. **Install Cosign**: Sets up Cosign for image signing (except on PRs) +3. **Set up Docker Buildx**: Configures Docker for multi-platform builds +4. **Log into registry**: Authenticates with GitHub Container Registry (except on PRs) +5. **Extract Docker metadata**: Prepares tags and labels for the image +6. **Build and push Docker image**: Builds the image and pushes it to the registry (except on PRs) +7. **Sign the published Docker image**: Signs the image using Cosign (except on PRs) + +## Testing the Workflow + +### Local Testing with `act` + +You can test the workflow locally using [act](https://github.com/nektos/act): + +```bash +# Install act +# macOS +brew install act + +# Linux +curl -s https://raw.githubusercontent.com/nektos/act/master/install.sh | sudo bash + +# Run the workflow for a push event +act push + +# Run the workflow for a specific event +act workflow_dispatch +``` + +Note: Some features like Cosign signing might not work correctly in local testing. + +### Testing on GitHub + +To test the workflow on GitHub: + +1. **Push to main branch**: + + ```bash + git add . + git commit -m "Test GitHub Actions workflow" + git push origin main + ``` + +2. **Create and push a version tag**: + + ```bash + git tag v1.0.0 + git push origin v1.0.0 + ``` + +3. **Manual trigger**: + - Go to your repository on GitHub + - Navigate to "Actions" tab + - Select the "Docker" workflow + - Click "Run workflow" dropdown + - Select the branch and click "Run workflow" + +## Customizing the Workflow + +### Changing the Schedule + +To change when the workflow runs automatically, modify the `cron` expression in the `schedule` section: + +```yaml +on: + schedule: + - cron: "32 15 * * *" # Current: 15:32 UTC daily +``` + +Common cron examples: + +- `0 0 * * *`: Daily at midnight UTC +- `0 */6 * * *`: Every 6 hours +- `0 0 * * 0`: Weekly on Sunday at midnight UTC + +### Image Tagging Strategy + +The workflow uses a comprehensive tagging strategy: + +1. **Semantic Versioning Tags** (for version tags like `v1.2.3`): + + - Full version: `v1.2.3` + - Minor version: `v1.2` + - Major version: `v1` + +2. **Branch and PR Tags**: + + - Branch name (e.g., `main`, `develop`) + - PR number (e.g., `pr-42`) + +3. **Special Tags**: + - The `latest` tag is automatically applied to: + - Builds from the `main` branch + - Builds triggered by version tags (e.g., `v1.2.3`) + - The `develop` tag is automatically applied to: + - Builds from the `develop` branch + +This ensures that users can always access: + +- The most recent stable version using the `latest` tag +- The most recent development version using the `develop` tag + +### Changing the Registry + +The workflow is configured to push to GitHub Container Registry (ghcr.io). To use a different registry: + +1. Modify the `REGISTRY` environment variable: + + ```yaml + env: + REGISTRY: docker.io # For Docker Hub + ``` + +2. 
Update the authentication step with appropriate credentials. + +### Multi-Platform Builds + +The workflow is set up for multi-platform builds using Docker Buildx. To specify platforms, add a `platforms` parameter to the build-and-push step: + +```yaml +- name: Build and push Docker image + uses: docker/build-push-action@v5.0.0 + with: + context: . + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max +``` + +## Troubleshooting + +### Common Issues + +1. **Authentication Failures**: + + - Ensure your GitHub token has the necessary permissions + - Check that the repository has packages write permissions + +2. **Build Failures**: + + - Check the Dockerfile for errors + - Ensure all required files are included in the repository + +3. **Signing Issues**: + - Verify Cosign is installed correctly + - Check that the identity token is available + +### Viewing Workflow Logs + +To view detailed logs: + +1. Go to your repository on GitHub +2. Navigate to the "Actions" tab +3. Click on the specific workflow run +4. Expand the job and step that failed to see detailed logs + +## Additional Resources + +- [GitHub Actions Documentation](https://docs.github.com/en/actions) +- [Docker GitHub Action](https://github.com/docker/build-push-action) +- [Cosign Documentation](https://github.com/sigstore/cosign) +- [GitHub Container Registry Documentation](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry) diff --git a/docs/INTERNATIONALIZATION.md b/docs/INTERNATIONALIZATION.md new file mode 100644 index 0000000..21cd034 --- /dev/null +++ b/docs/INTERNATIONALIZATION.md @@ -0,0 +1,160 @@ +# Internationalization Guide + +This document explains how the internationalization (i18n) system works in the Export Trakt 4 Letterboxd project and how to contribute to translation. + +## Overview + +The internationalization system allows the script to be displayed in different languages based on user preferences. The process is automated and selects the language based on: + +1. The user's explicit configuration in the `.config.cfg` file +2. The operating system language if no configuration is specified +3. English as the default language if the system language is not supported + +## Supported Languages + +Currently, the following languages are supported: + +- English (en) - Default language +- French (fr) +- Spanish (es) +- German (de) +- Italian (it) + +## Configuration + +To explicitly set the language, edit the `config/.config.cfg` file and set the `LANGUAGE` variable: + +```bash +# Language for user interface (en, fr, es, de, it) +# Leave empty for automatic detection from the system +LANGUAGE="en" +``` + +To use the system language, simply leave this value empty: + +```bash +LANGUAGE="" +``` + +## File Structure + +The internationalization system is organized according to a standard structure: + +``` +Export_Trakt_4_Letterboxd/ +├── lib/ +│ ├── i18n.sh # Main internationalization module +│ └── ... 
+├── locales/ # Directory containing translations +│ ├── en/ # English +│ │ └── LC_MESSAGES/ +│ │ └── messages.sh # English messages file +│ ├── fr/ # French +│ │ └── LC_MESSAGES/ +│ │ └── messages.sh +│ ├── es/ # Spanish +│ │ └── LC_MESSAGES/ +│ │ └── messages.sh +│ ├── it/ # Italian +│ │ └── LC_MESSAGES/ +│ │ └── messages.sh +│ └── de/ # German +│ └── LC_MESSAGES/ +│ └── messages.sh +├── manage_translations.sh # Translation management utility +└── ... +``` + +## How It Works + +1. At startup, the script initializes the i18n module +2. The module loads the language specified in the configuration file or detects the system language +3. It then loads the corresponding messages from the appropriate language file +4. When displaying text to the user, the script uses the `_()` function to get the translated text + +## Translation Management Utility + +A translation utility `manage_translations.sh` is provided to help manage language files. It allows you to: + +- List available languages +- Create a template for a new language +- Update language files with new strings +- Display translation status for all languages + +### Using the Utility + +```bash +# Display help +./manage_translations.sh help + +# List available languages +./manage_translations.sh list + +# Create a template for a new language (ex: Italian) +./manage_translations.sh create it + +# Update all language files with new/missing strings +./manage_translations.sh update + +# Display translation status for all languages +./manage_translations.sh status +``` + +## Translation Contribution Guide + +If you want to contribute to translating the application into a new language or improving an existing translation, follow these steps: + +1. **For a new language:** + + - Run `./manage_translations.sh create xx` (where `xx` is the 2-letter language code) + - Edit the generated file in `locales/xx/LC_MESSAGES/messages.sh` + +2. **To update an existing translation:** + - Run `./manage_translations.sh update` to add missing strings + - Look for comments with `# TODO: Translate this` and translate those strings + +### Translation Tips + +- Keep special characters like `%s`, `%d`, etc. as they are used for variable insertion +- Respect case and punctuation when relevant +- Make sure the translated text has a similar meaning to the original text +- Test your translation by setting `LANGUAGE="xx"` in the configuration file + +## Message File Format + +Each message file is a bash script that declares message variables: + +```bash +#!/bin/bash +# +# Language: en +# + +# Define messages +# Variables must start with MSG_ to be recognized by the system + +# General messages +MSG_HELLO="Hello" +MSG_WELCOME="Welcome to Export Trakt 4 Letterboxd" +# More translations... +``` + +## Adding New Translatable Strings + +If you're developing new features that require adding new translatable strings: + +1. First add the string to the English file (`locales/en/LC_MESSAGES/messages.sh`) +2. Use the `_()` function to reference the string in your code +3. Run `./manage_translations.sh update` to update all language files + +## Debugging + +If you encounter issues with translations: + +1. Check that the language file exists and is properly formatted +2. Verify that the message key exists in the message file +3. If a translation is missing, the system will use the default English text + +## Locale-Specific Date and Time Formats + +In addition to text translation, the system also supports different date formats based on language. 
This allows dates to be displayed in a format familiar to users from each region. diff --git a/docs/MODULARIZATION.md b/docs/MODULARIZATION.md new file mode 100644 index 0000000..3606988 --- /dev/null +++ b/docs/MODULARIZATION.md @@ -0,0 +1,118 @@ +# Code Restructuring and Modularization + +This document outlines the changes made as part of Issue #12 to restructure and modularize the Export Trakt 4 Letterboxd codebase. + +## Overview + +The original script was a single monolithic file (`Export_Trakt_4_Letterboxd.sh`) containing all functionality. This approach made maintenance challenging, limited reusability, and complicated debugging efforts. + +The restructuring involved breaking down the script into separate modules, each with a specific responsibility. This improves code maintainability, testability, and facilitates future enhancements. + +## New Structure + +The codebase now follows a modular structure: + +``` +Export_Trakt_4_Letterboxd/ +├── lib/ # Library modules +│ ├── config.sh # Configuration management +│ ├── utils.sh # Utility functions and debugging +│ ├── trakt_api.sh # API interaction functions +│ ├── data_processing.sh # Data transformation functions +│ └── main.sh # Main orchestration module +├── Export_Trakt_4_Letterboxd.sh # Main script (simplified) +└── install.sh # New installation script +``` + +## Module Responsibilities + +### 1. lib/config.sh + +Handles all configuration-related functionality: + +- Loading configuration files +- Setting up directories (logs, copies, temp, backup) +- OS detection for cross-platform compatibility +- Environment logging + +### 2. lib/utils.sh + +Contains utility functions used across the application: + +- Debug message formatting and logging +- File information inspection +- Dependency checking +- Progress bar visualization +- Error handling + +### 3. lib/trakt_api.sh + +Manages all interactions with the Trakt API: + +- Token refresh and validation +- API endpoint determination based on mode +- Data fetching with proper error handling +- Authentication management + +### 4. lib/data_processing.sh + +Focuses on processing and transforming the data: + +- Creating lookup tables for ratings and play counts +- Processing movie history with timestamps and ratings +- Managing watched movies with deduplication +- Creating backup archives +- CSV file generation for Letterboxd + +### 5. lib/main.sh + +Acts as the orchestrator for the entire process: + +- Imports all required modules +- Initializes the environment +- Processes command line arguments +- Coordinates the data fetching and processing steps +- Handles the export workflow + +## New Installation Experience + +A new `install.sh` script has been added to simplify the setup process. This script: + +- Creates all required directories +- Checks for required dependencies +- Sets up the configuration file +- Sets appropriate file permissions +- Guides the user through the next steps + +## Benefits of the New Structure + +1. **Maintainability**: Each module has a single responsibility, making code easier to maintain. +2. **Testability**: Functions are isolated, enabling more effective testing. +3. **Readability**: Smaller, focused files are easier to read and understand. +4. **Extensibility**: New features can be added by extending specific modules without affecting others. +5. **Debugging**: Issues can be traced to specific modules, simplifying the debugging process. 
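+To make the orchestration described in the module breakdown above concrete, the sketch below shows one way the pieces could be wired together. The `lib/*.sh` paths come from the structure above, but the function names (`load_config`, `check_dependencies`, `fetch_trakt_data`, `generate_csv`) are illustrative placeholders rather than the project's actual API.
+
+```bash
+#!/bin/bash
+# Minimal sketch of a main orchestration module (illustrative only).
+# Paths follow the lib/ layout above; the function names are hypothetical.
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+source "${SCRIPT_DIR}/config.sh"          # configuration management
+source "${SCRIPT_DIR}/utils.sh"           # logging and helper functions
+source "${SCRIPT_DIR}/trakt_api.sh"       # Trakt API interactions
+source "${SCRIPT_DIR}/data_processing.sh" # data transformation / CSV output
+
+load_config "$@"     # read the config file and prepare directories
+check_dependencies   # verify curl, jq, etc. are available
+fetch_trakt_data     # download history, ratings and watchlist
+generate_csv         # build the Letterboxd-compatible CSV
+```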
+ +## Migration Notes + +The functionality of the original script remains intact, with the following improvements: + +- Enhanced error handling with detailed logging +- Better progress reporting during operations +- Improved cross-platform compatibility +- Clearer separation of concerns +- More robust dependency checking +- Simplified main script + +## Future Enhancements + +This modular structure facilitates future enhancements such as: + +1. Adding unit tests for individual functions +2. Implementing additional data export formats +3. Supporting more API endpoints and data types +4. Enhancing the user interface (CLI or web-based) +5. Extending support for other services beyond Letterboxd + +## Conclusion + +The restructuring provides a solid foundation for maintaining and extending the Export Trakt 4 Letterboxd tool. The modular approach ensures that the codebase remains manageable as it grows and evolves with new features and improvements. diff --git a/docs/TESTING.md b/docs/TESTING.md new file mode 100644 index 0000000..8420dec --- /dev/null +++ b/docs/TESTING.md @@ -0,0 +1,168 @@ +# Testing Framework Documentation + +This document provides detailed information about the testing framework implemented for Export_Trakt_4_Letterboxd. + +## Overview + +The testing framework is designed to ensure code quality, prevent regressions, and make it easier to add new features. It consists of unit tests for core functions, integration tests for the export process, and a test runner script that can generate code coverage reports. + +## Testing Structure + +The tests are organized in the following directory structure: + +``` +tests/ +├── unit/ # Unit tests for individual functions +│ ├── config_test.bats # Tests for config.sh +│ ├── trakt_api_test.bats # Tests for trakt_api.sh +│ └── data_processing_test.bats # Tests for data_processing.sh +├── integration/ # Integration tests +│ └── export_process_test.bats # Test for complete export process +├── mocks/ # Mock API responses +│ ├── ratings.json # Mock ratings data +│ ├── history.json # Mock history data +│ ├── watchlist.json # Mock watchlist data +│ └── trakt_api_mock.sh # Mock API functions +├── helpers/ # Bats helper libraries +│ ├── bats-assert/ # Assertion library for Bats +│ ├── bats-support/ # Support functions for Bats +│ └── bats-file/ # File-related assertions for Bats +├── bats/ # Bats core test framework +├── data/ # Test data files +├── test_helper.bash # Common setup for all tests +└── run_tests.sh # Script to run tests and generate coverage reports +``` + +## Dependencies + +The testing framework relies on the following tools: + +1. **Bats (Bash Automated Testing System)**: A TAP-compliant testing framework for Bash +2. **jq**: A lightweight and flexible command-line JSON processor +3. **kcov** (optional): For generating code coverage reports + +## Installation + +The Bats testing framework and its helper libraries are installed as Git submodules. To initialize them: + +```bash +git submodule update --init --recursive +``` + +For jq and kcov, install them using your package manager: + +```bash +# Debian/Ubuntu +apt-get install jq kcov + +# macOS +brew install jq kcov +``` + +## Running Tests + +### Basic Test Run + +To run all tests: + +```bash +./tests/run_tests.sh +``` + +This will execute all unit and integration tests and provide a summary of the results. 
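+During development it is often convenient to run a single test file instead of the whole suite. With the Bats submodule initialized as described above, an invocation along the following lines should work; the paths are taken from the Testing Structure section and may need adjusting to your checkout.
+
+```bash
+# Run only the config unit tests (path from the structure above)
+./tests/bats/bin/bats tests/unit/config_test.bats
+
+# Run every unit test in the directory
+./tests/bats/bin/bats tests/unit/
+```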
+ +### Code Coverage Reports + +To generate a code coverage report: + +```bash +./tests/run_tests.sh coverage +``` + +The coverage report will be available in HTML format at `test-results/coverage/index.html`. + +## Writing Tests + +### Unit Tests + +Unit tests should test individual functions in isolation. Example: + +```bash +@test "function_name should do something specific" { + # Setup test environment + local input="test input" + local expected="expected output" + + # Run the function + run function_name "$input" + + # Assert the results + assert_success + assert_output "$expected" +} +``` + +### Integration Tests + +Integration tests should test the interaction between multiple components: + +```bash +@test "Integration: Export process should produce valid CSV files" { + # Setup the integration test environment + setup_integration_test + + # Run the export process + run ./export_script.sh + + # Verify the output + assert_success + assert_file_exists "output.csv" +} +``` + +## Mocking API Calls + +The framework includes mock functions for the Trakt API to avoid making real API calls during tests. To use the mock functions: + +```bash +# Load the mock API functions +source "${TESTS_DIR}/mocks/trakt_api_mock.sh" + +# Enable test mode +export TEST_MODE="true" + +# Now API calls will use mock data +``` + +## Test Helper Functions + +Common test helper functions are defined in `test_helper.bash`: + +- `setup()`: Called before each test to set up the test environment +- `teardown()`: Called after each test to clean up +- `create_mock_config()`: Creates a mock configuration file for testing +- `load_mock_response()`: Loads a mock API response + +## Continuous Integration + +The tests are automatically run in the GitHub Actions CI/CD pipeline for every pull request. The workflow is defined in `.github/workflows/docker-test.yml`. + +## Best Practices + +When writing tests, follow these best practices: + +1. **Isolation**: Tests should be independent of each other +2. **Cleanup**: Always clean up temporary files in the teardown function +3. **Mock External Dependencies**: Use mock functions for external APIs and services +4. **Test Edge Cases**: Include tests for error conditions and edge cases +5. **Keep Tests Fast**: Tests should run quickly to provide rapid feedback +6. **Descriptive Names**: Use descriptive test names that explain what is being tested + +## Troubleshooting + +Common issues and solutions: + +- **Test not found**: Ensure the test file is executable and follows the naming convention `*_test.bats` +- **Bats command not found**: Run `git submodule update --init --recursive` to install Bats +- **jq not found**: Install jq using your package manager +- **Coverage report not generated**: Install kcov and ensure it's in your PATH diff --git a/examples/scheduling.md b/examples/scheduling.md deleted file mode 100644 index eedaf8d..0000000 --- a/examples/scheduling.md +++ /dev/null @@ -1,258 +0,0 @@ -# Scheduling and Immediate Execution Examples - -This document provides practical examples of using the new `--run` and `--schedule` options introduced in Export Trakt 4 Letterboxd. - -## Overview - -The application now supports three execution modes: - -1. **Traditional Mode**: Using commands like `export`, `schedule`, etc. -2. **Immediate Execution Mode**: Using `--run` flag for one-time execution -3. **Scheduled Mode**: Using `--schedule` flag with cron expressions - -## Immediate Execution (`--run`) - -The `--run` flag executes the export immediately once and then exits. 
This is useful for: - -- One-time exports -- Testing configurations -- Manual exports triggered by external scripts -- CI/CD pipeline integrations - -### Examples - -```bash -# Export all data immediately with complete mode -./export_trakt --run --export all --mode complete - -# Export only watched movies with normal mode -./export_trakt --run --export watched --mode normal - -# Export collection with custom config file -./export_trakt --run --export collection --config custom_config.toml - -# Export ratings immediately -./export_trakt --run --export ratings --mode complete -``` - -## Scheduled Execution (`--schedule`) - -The `--schedule` flag sets up continuous execution according to a cron schedule. This is perfect for: - -- Automated backups -- Regular synchronization -- Unattended operation -- Server deployments - -### Cron Format - -The schedule uses standard cron format: `minute hour day-of-month month day-of-week` - -``` -* * * * * -| | | | | -| | | | +-- Day of week (0-7, Sunday=0 or 7) -| | | +------- Month (1-12) -| | +------------- Day of month (1-31) -| +------------------- Hour (0-23) -+------------------------- Minute (0-59) -``` - -### Schedule Examples - -#### Frequent Updates - -```bash -# Every 15 minutes (high-frequency monitoring) -./export_trakt --schedule "*/15 * * * *" --export watched --mode normal - -# Every hour at minute 0 -./export_trakt --schedule "0 * * * *" --export watched --mode normal - -# Every 6 hours -./export_trakt --schedule "0 */6 * * *" --export all --mode complete -``` - -#### Daily Schedules - -```bash -# Every day at 2:30 AM -./export_trakt --schedule "30 2 * * *" --export all --mode complete - -# Every day at noon -./export_trakt --schedule "0 12 * * *" --export watchlist --mode normal - -# Every day at 6:00 PM -./export_trakt --schedule "0 18 * * *" --export ratings --mode complete -``` - -#### Weekly Schedules - -```bash -# Every Monday at 9:00 AM -./export_trakt --schedule "0 9 * * 1" --export all --mode complete - -# Every Sunday at 3:00 AM (weekly backup) -./export_trakt --schedule "0 3 * * 0" --export all --mode complete - -# Every Friday at 5:00 PM -./export_trakt --schedule "0 17 * * 5" --export collection --mode normal -``` - -#### Monthly Schedules - -```bash -# First day of every month at midnight -./export_trakt --schedule "0 0 1 * *" --export all --mode complete - -# 15th of every month at 3:30 AM -./export_trakt --schedule "30 3 15 * *" --export all --mode complete -``` - -## Use Cases and Scenarios - -### Development and Testing - -```bash -# Quick test of configuration -./export_trakt --run --export watched --mode normal - -# Test all export types -./export_trakt --run --export all --mode complete -``` - -### Production Automation - -```bash -# Daily backup at 2:00 AM -./export_trakt --schedule "0 2 * * *" --export all --mode complete - -# Incremental updates every 4 hours -./export_trakt --schedule "0 */4 * * *" --export watched --mode normal -``` - -### Server Deployment - -```bash -# Background scheduler (using nohup) -nohup ./export_trakt --schedule "0 */6 * * *" --export all --mode complete > scheduler.log 2>&1 & - -# Systemd service with immediate start -./export_trakt --run --export all --mode complete && \ -./export_trakt --schedule "0 4 * * *" --export all --mode complete -``` - -### Docker Integration - -```bash -# Docker run with immediate execution -docker run --rm -v $(pwd)/config:/app/config \ - johandevl/export-trakt-4-letterboxd:latest \ - --run --export all --mode complete - -# Docker run with scheduling -docker 
run -d --name trakt-scheduler \ - -v $(pwd)/config:/app/config \ - -v $(pwd)/exports:/app/exports \ - johandevl/export-trakt-4-letterboxd:latest \ - --schedule "0 */6 * * *" --export all --mode complete -``` - -## Error Handling and Validation - -### Invalid Cron Expressions - -The application validates cron expressions and provides helpful error messages: - -```bash -# Invalid format -./export_trakt --schedule "invalid" --export watched -# Output: Invalid cron schedule format: invalid -# Error: expected exactly 5 fields, found 1: [invalid] -# Example formats: -# '0 */6 * * *' - Every 6 hours -# '0 9 * * 1' - Every Monday at 9:00 AM -# '30 14 * * *' - Every day at 2:30 PM -``` - -### Configuration Validation - -```bash -# Test configuration before scheduling -./export_trakt --run --export watched --mode normal -# If this succeeds, your configuration is valid for scheduling -``` - -## Monitoring and Logging - -### Viewing Scheduler Status - -When using `--schedule`, the application provides detailed logging: - -``` -INFO[2025-01-20T10:00:00Z] scheduler.started schedule="0 */6 * * *" next_run="2025-01-20T16:00:00Z" -INFO[2025-01-20T16:00:00Z] scheduler.executing_export export_type="all" export_mode="complete" -INFO[2025-01-20T16:05:00Z] export.completed_successfully export_type="all" export_mode="complete" -``` - -### Log Files - -Configure logging in your `config.toml`: - -```toml -[logging] -level = "info" -file = "logs/scheduler.log" -``` - -## Best Practices - -### 1. Start with Immediate Execution - -Test your configuration with `--run` before setting up scheduling: - -```bash -./export_trakt --run --export watched --mode normal -``` - -### 2. Use Appropriate Export Modes - -- `normal`: For frequent updates (every few hours) -- `complete`: For comprehensive backups (daily/weekly) - -### 3. Consider Resource Usage - -- More frequent exports consume more API calls -- Use `watched` type for frequent updates, `all` for comprehensive backups - -### 4. Monitor Scheduler Health - -- Check logs regularly -- Set up external monitoring if running in production -- Use process managers like systemd or Docker's restart policies - -### 5. Backup Configurations - -Always keep a backup of your working configuration files before making changes. - -## Troubleshooting - -### Common Issues - -1. **Invalid Configuration**: Test with `--run` first -2. **Wrong Cron Format**: Use online cron validators -3. **Permission Issues**: Ensure write access to export directory -4. **API Rate Limits**: Avoid very frequent schedules (less than 15 minutes) - -### Debugging Commands - -```bash -# Test immediate execution -./export_trakt --run --export watched --mode normal - -# Validate cron expression -./export_trakt --schedule "0 */6 * * *" --export watched --mode normal - -# Check configuration -./export_trakt validate -``` diff --git a/examples/test_scheduling.sh b/examples/test_scheduling.sh deleted file mode 100755 index 4e84e24..0000000 --- a/examples/test_scheduling.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/bash - -# Test script for the new --run and --schedule features -# Export Trakt 4 Letterboxd - -echo "=== Export Trakt 4 Letterboxd - Scheduling Test Script ===" -echo "" - -# Build the application first -echo "Building the application..." -go build -o export_trakt ./cmd/export_trakt/ -if [ $? -ne 0 ]; then - echo "❌ Build failed!" - exit 1 -fi -echo "✅ Build successful!" 
-echo "" - -# Test 1: Show help to verify new options are available -echo "=== Test 1: Checking available options ===" -./export_trakt --help -echo "" - -# Test 2: Test invalid cron format validation -echo "=== Test 2: Testing cron format validation ===" -echo "Testing invalid cron format (should show error):" -./export_trakt --schedule "invalid-format" --export watched 2>&1 | head -10 -echo "" - -# Test 3: Test valid cron format (dry run) -echo "=== Test 3: Testing valid cron format validation ===" -echo "Testing valid cron format (should validate successfully):" -timeout 5s ./export_trakt --schedule "0 */6 * * *" --export watched 2>&1 & -PID=$! -sleep 2 -if kill -0 $PID 2>/dev/null; then - echo "✅ Scheduler started successfully (process running)" - kill $PID 2>/dev/null -else - echo "❌ Scheduler failed to start" -fi -echo "" - -# Test 4: Test immediate execution mode -echo "=== Test 4: Testing immediate execution mode ===" -echo "Testing --run flag (should execute once and exit):" -timeout 10s ./export_trakt --run --export watched --mode normal 2>&1 | head -5 -echo "✅ Immediate execution test completed" -echo "" - -# Test 5: Show different schedule examples -echo "=== Test 5: Schedule Examples ===" -echo "Here are some example schedules you can use:" -echo "" -echo "Every 6 hours:" -echo " ./export_trakt --schedule \"0 */6 * * *\" --export all --mode complete" -echo "" -echo "Every day at 2:30 AM:" -echo " ./export_trakt --schedule \"30 2 * * *\" --export all --mode complete" -echo "" -echo "Every Monday at 9:00 AM:" -echo " ./export_trakt --schedule \"0 9 * * 1\" --export collection --mode normal" -echo "" -echo "Every 15 minutes (high frequency):" -echo " ./export_trakt --schedule \"*/15 * * * *\" --export watched --mode normal" -echo "" - -echo "=== All tests completed! ===" -echo "" -echo "✅ The new --run and --schedule features are working correctly!" -echo "" -echo "Usage examples:" -echo " # Immediate execution:" -echo " ./export_trakt --run --export all --mode complete" -echo "" -echo " # Scheduled execution:" -echo " ./export_trakt --schedule \"0 */6 * * *\" --export all --mode complete" -echo "" -echo "For more examples, see: examples/scheduling.md" \ No newline at end of file diff --git a/exports/.gitkeep b/exports/.gitkeep deleted file mode 100644 index 13346f4..0000000 --- a/exports/.gitkeep +++ /dev/null @@ -1,2 +0,0 @@ -# This file keeps the exports directory in git while ignoring all export files -# Export files are generated dynamically and should not be committed \ No newline at end of file diff --git a/exports/README.md b/exports/README.md deleted file mode 100644 index b0b6ac8..0000000 --- a/exports/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# Exports Directory - -This directory contains the exported data from Trakt.tv in Letterboxd-compatible format. - -## Structure - -Export files are automatically generated with timestamps: - -- `export_YYYY-MM-DD_HH-MM/` - Timestamped export directories -- Each export contains CSV files compatible with Letterboxd import - -## Usage - -Export files are created when running the application: - -```bash -./export_trakt --config ./config/config.toml -``` - -## Note - -Export files are automatically ignored by git as they contain personal data and are meant to be used locally or uploaded to Letterboxd manually. 
diff --git a/exports/de.po b/exports/de.po new file mode 100644 index 0000000..6be7445 --- /dev/null +++ b/exports/de.po @@ -0,0 +1,530 @@ +# Translation for Export Trakt 4 Letterboxd +# Copyright (C) 2025 Export Trakt 4 Letterboxd +# This file is distributed under the same license as the Export Trakt 4 Letterboxd package. +# +msgid "" +msgstr "" +"Project-Id-Version: Export Trakt 4 Letterboxd 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2025-03-24 22:44+0100\n" +"PO-Revision-Date: 2025-03-24 22:44+0100\n" +"Last-Translator: Automatic export\n" +"Language-Team: de\n" +"Language: de\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: MSG_ABORT +msgid "Abort" +msgstr "Abbrechen" + +#: MSG_ACCESS_TOKEN_CHECK +msgid "Access token check" +msgstr "" + +#: MSG_ACCESS_TOKEN_FOUND +msgid "Access token found" +msgstr "" + +#: MSG_ACCESS_TOKEN_NOT_FOUND +msgid "Access token not found" +msgstr "" + +#: MSG_API_AUTH_FAILURE +msgid "Authentication failed" +msgstr "Authentifizierung fehlgeschlagen" + +#: MSG_API_AUTH_REQUIRED +msgid "Authentication required" +msgstr "Authentifizierung erforderlich" + +#: MSG_API_AUTH_SUCCESS +msgid "Authentication successful" +msgstr "Authentifizierung erfolgreich" + +#: MSG_API_ERROR +msgid "API error" +msgstr "API-Fehler" + +#: MSG_API_KEY_CHECK +msgid "API key check" +msgstr "" + +#: MSG_API_KEY_FOUND +msgid "API key found" +msgstr "" + +#: MSG_API_KEY_NOT_FOUND +msgid "API key not found" +msgstr "" + +#: MSG_API_LIMIT +msgid "API limit reached" +msgstr "API-Limit erreicht" + +#: MSG_API_REQUEST +msgid "API request" +msgstr "API-Anfrage" + +#: MSG_API_RESPONSE +msgid "API response" +msgstr "API-Antwort" + +#: MSG_API_RETRY +msgid "Retry" +msgstr "Wiederholung" + +#: MSG_API_SECRET_CHECK +msgid "API secret check" +msgstr "" + +#: MSG_API_SECRET_FOUND +msgid "API secret found" +msgstr "" + +#: MSG_API_SECRET_NOT_FOUND +msgid "API secret not found" +msgstr "" + +#: MSG_API_WAIT +msgid "Waiting before next request" +msgstr "Warten vor der nächsten Anfrage" + +#: MSG_AUTO_DETECTED +msgid "auto-detected" +msgstr "automatisch erkannt" + +#: MSG_BACKUP_DIRECTORY +msgid "Backup directory" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_EXISTS +msgid "Backup directory exists" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_NOT_WRITABLE +msgid "WARNING: Backup directory is not writable. Check permissions." 
+msgstr "" + +#: MSG_BACKUP_DIRECTORY_WRITABLE +msgid "Backup directory is writable" +msgstr "" + +#: MSG_CANCEL +msgid "Cancel" +msgstr "Abbrechen" + +#: MSG_CHECKING_DEPENDENCIES +msgid "Checking required dependencies" +msgstr "" + +#: MSG_CONFIG_CREATED +msgid "Configuration file created" +msgstr "Konfigurationsdatei erstellt" + +#: MSG_CONFIG_DEFAULT +msgid "Default configuration" +msgstr "Standardkonfiguration" + +#: MSG_CONFIG_ERROR +msgid "Error in configuration file" +msgstr "Fehler in der Konfigurationsdatei" + +#: MSG_CONFIG_LOADED +msgid "Configuration loaded" +msgstr "Konfiguration geladen" + +#: MSG_CONFIG_LOADING +msgid "Loading configuration" +msgstr "Konfiguration wird geladen" + +#: MSG_CONFIG_MISSING +msgid "Configuration missing" +msgstr "Konfiguration fehlt" + +#: MSG_CONFIG_NOT_FOUND +msgid "Configuration file not found" +msgstr "Konfigurationsdatei nicht gefunden" + +#: MSG_CONFIG_SAVED +msgid "Configuration saved" +msgstr "Konfiguration gespeichert" + +#: MSG_CONFIG_SAVING +msgid "Saving configuration" +msgstr "Konfiguration wird gespeichert" + +#: MSG_CONFIG_UPDATED +msgid "Configuration updated" +msgstr "Konfiguration aktualisiert" + +#: MSG_CONFIRM +msgid "Confirm" +msgstr "Bestätigen" + +#: MSG_CONTINUE +msgid "Continue" +msgstr "Fortfahren" + +#: MSG_COPY_DIRECTORY +msgid "Copy directory" +msgstr "" + +#: MSG_CREATED_BACKUP_DIRECTORY +msgid "Created backup directory" +msgstr "" + +#: MSG_DIRECTORY_CREATED +msgid "Directory created" +msgstr "Verzeichnis erstellt" + +#: MSG_DIRECTORY_EXISTS +msgid "Directory exists" +msgstr "" + +#: MSG_DIRECTORY_NOT_FOUND +msgid "Directory not found" +msgstr "Verzeichnis nicht gefunden" + +#: MSG_DIRECTORY_PERMISSIONS +msgid "Directory permissions" +msgstr "" + +#: MSG_DONE +msgid "Done" +msgstr "Erledigt" + +#: MSG_ENVIRONMENT_INFO +msgid "Environment information" +msgstr "" + +#: MSG_ERROR +msgid "Error" +msgstr "Fehler" + +#: MSG_ERROR_MISSING_LANG_FILE +msgid "Error: Language file not found. Using English defaults." +msgstr "Fehler: Sprachdatei nicht gefunden. Englische Standardwerte werden verwendet." 
+ +#: MSG_EXISTING_CSV_CHECK +msgid "Existing CSV file check" +msgstr "" + +#: MSG_EXIT +msgid "Exit" +msgstr "Beenden" + +#: MSG_EXPORT_COMPLETE +msgid "Export completed" +msgstr "Export abgeschlossen" + +#: MSG_EXPORT_FAILED +msgid "Export process failed" +msgstr "Exportvorgang fehlgeschlagen" + +#: MSG_EXPORT_FILE_CREATED +msgid "Export file created: %s" +msgstr "Exportdatei erstellt: %s" + +#: MSG_EXPORT_FINISHED +msgid "Export process completed" +msgstr "Exportvorgang abgeschlossen" + +#: MSG_EXPORT_FORMATTING +msgid "Formatting export data" +msgstr "Exportdaten werden formatiert" + +#: MSG_EXPORT_GENERATING +msgid "Generating export file" +msgstr "Exportdatei wird generiert" + +#: MSG_EXPORT_NO_DATA +msgid "No data to export" +msgstr "Keine Daten zum Exportieren" + +#: MSG_EXPORT_PROCESSING +msgid "Processing export data" +msgstr "Exportdaten werden verarbeitet" + +#: MSG_EXPORT_SAVING +msgid "Saving export file" +msgstr "Exportdatei wird gespeichert" + +#: MSG_EXPORT_START +msgid "Starting export" +msgstr "Export wird gestartet" + +#: MSG_EXPORT_STARTING +msgid "Starting export process" +msgstr "Exportvorgang wird gestartet" + +#: MSG_EXPORT_SUMMARY +msgid "Export summary" +msgstr "Exportzusammenfassung" + +#: MSG_FAILED +msgid "Failed" +msgstr "Fehlgeschlagen" + +#: MSG_FILE_CREATED +msgid "File created: %s" +msgstr "Datei erstellt: %s" + +#: MSG_FILE_DELETED +msgid "File deleted: %s" +msgstr "Datei gelöscht: %s" + +#: MSG_FILE_EXISTS +msgid "File exists" +msgstr "" + +#: MSG_FILE_EXISTS_NOT +msgid "File not found" +msgstr "" + +#: MSG_FILE_HAS_CONTENT +msgid "File has content" +msgstr "" + +#: MSG_FILE_IS_READABLE +msgid "File is readable" +msgstr "" + +#: MSG_FILE_IS_WRITABLE +msgid "File is writable" +msgstr "" + +#: MSG_FILE_NOT_FOUND +msgid "File not found: %s" +msgstr "Datei nicht gefunden: %s" + +#: MSG_FILE_PERMISSION_DENIED +msgid "Permission denied for file: %s" +msgstr "Zugriff auf Datei verweigert: %s" + +#: MSG_FILE_READ_ERROR +msgid "File read error" +msgstr "Fehler beim Lesen der Datei" + +#: MSG_FILE_UPDATED +msgid "File updated: %s" +msgstr "Datei aktualisiert: %s" + +#: MSG_FILE_WRITE_ERROR +msgid "File write error" +msgstr "Fehler beim Schreiben der Datei" + +#: MSG_GOODBYE +msgid "Goodbye" +msgstr "Auf Wiedersehen" + +#: MSG_HELLO +msgid "Hello" +msgstr "Hallo" + +#: MSG_HELP +msgid "Help" +msgstr "Hilfe" + +#: MSG_INFO +msgid "Information" +msgstr "Information" + +#: MSG_INVALID_OPTION +msgid "Invalid option" +msgstr "Ungültige Option" + +#: MSG_LANGUAGE_SET +msgid "Language set to" +msgstr "Sprache eingestellt auf" + +#: MSG_LOG_DIRECTORY +msgid "Log directory" +msgstr "" + +#: MSG_MISSING_DEPENDENCIES +msgid "Some required dependencies are missing. Please install them before continuing." 
+msgstr "" + +#: MSG_NO +msgid "No" +msgstr "Nein" + +#: MSG_NONE +msgid "none" +msgstr "keine" + +#: MSG_NO_OPTION +msgid "No option provided, using default" +msgstr "" + +#: MSG_OS_TYPE +msgid "OS Type" +msgstr "" + +#: MSG_PLEASE_WAIT +msgid "Please wait" +msgstr "Bitte warten" + +#: MSG_PROCESSING +msgid "Processing" +msgstr "Verarbeitung läuft" + +#: MSG_REFRESH_TOKEN_CHECK +msgid "Refresh token check" +msgstr "" + +#: MSG_REFRESH_TOKEN_FOUND +msgid "Refresh token found" +msgstr "" + +#: MSG_REFRESH_TOKEN_NOT_FOUND +msgid "Refresh token not found" +msgstr "" + +#: MSG_RETRIEVING_INFO +msgid "Retrieving information" +msgstr "" + +#: MSG_RUNNING_DOCKER +msgid "Running in Docker container" +msgstr "Läuft in Docker-Container" + +#: MSG_RUNNING_IN +msgid "running on" +msgstr "läuft auf" + +#: MSG_SCRIPT_COMPLETE +msgid "Script execution completed successfully" +msgstr "" + +#: MSG_SCRIPT_DIRECTORY +msgid "Script directory" +msgstr "" + +#: MSG_SCRIPT_ERROR +msgid "An error occurred while running the script" +msgstr "Bei der Ausführung des Skripts ist ein Fehler aufgetreten" + +#: MSG_SCRIPT_EXECUTION_END +msgid "Script execution ended" +msgstr "Skriptausführung beendet" + +#: MSG_SCRIPT_EXECUTION_START +msgid "Script execution started" +msgstr "Skriptausführung gestartet" + +#: MSG_SCRIPT_FINISHED +msgid "Export Trakt 4 Letterboxd script finished" +msgstr "Export Trakt 4 Letterboxd Skript beendet" + +#: MSG_SCRIPT_INTERRUPTED +msgid "Script interrupted by user" +msgstr "Skript vom Benutzer unterbrochen" + +#: MSG_SCRIPT_OPTION +msgid "Script option" +msgstr "Skriptoption" + +#: MSG_SCRIPT_STARTING +msgid "Starting Export Trakt 4 Letterboxd script" +msgstr "Export Trakt 4 Letterboxd Skript wird gestartet" + +#: MSG_STARTING +msgid "Starting" +msgstr "Starten" + +#: MSG_SUCCESS +msgid "Success" +msgstr "Erfolgreich" + +#: MSG_TRAKT_API_ERROR +msgid "Error connecting to Trakt API" +msgstr "Fehler bei der Verbindung zur Trakt API" + +#: MSG_TRAKT_API_RATE_LIMIT +msgid "Trakt API rate limit reached, waiting..." +msgstr "Trakt API-Ratenlimit erreicht, warte..." + +#: MSG_TRAKT_AUTH_FAILED +msgid "Trakt authentication failed" +msgstr "Trakt-Authentifizierung fehlgeschlagen" + +#: MSG_TRAKT_AUTH_REQUIRED +msgid "Trakt authentication required" +msgstr "Trakt-Authentifizierung erforderlich" + +#: MSG_TRAKT_AUTH_SUCCESS +msgid "Trakt authentication successful" +msgstr "Trakt-Authentifizierung erfolgreich" + +#: MSG_TRANSLATION_ERROR +msgid "Translation error" +msgstr "Übersetzungsfehler" + +#: MSG_TRANSLATION_LOADED +msgid "Translation loaded" +msgstr "Übersetzung geladen" + +#: MSG_TRANSLATION_MISSING +msgid "Translation missing" +msgstr "Übersetzung fehlt" + +#: MSG_TRANSLATION_UPDATED +msgid "Translation updated" +msgstr "Übersetzung aktualisiert" + +#: MSG_USER +msgid "User" +msgstr "" + +#: MSG_USER_CONFIRM +msgid "Do you want to continue? (y/N)" +msgstr "Möchten Sie fortfahren? 
(j/N)" + +#: MSG_USER_CONFIRMATION +msgid "User confirmation" +msgstr "Benutzerbestätigung" + +#: MSG_USER_INPUT +msgid "User input" +msgstr "Benutzereingabe" + +#: MSG_USER_INPUT_REQUIRED +msgid "Please provide input" +msgstr "Bitte geben Sie eine Eingabe ein" + +#: MSG_USER_INVALID_INPUT +msgid "Invalid input, please try again" +msgstr "Ungültige Eingabe, bitte versuchen Sie es erneut" + +#: MSG_USER_PROMPT +msgid "User prompt" +msgstr "Benutzeraufforderung" + +#: MSG_USER_SELECTION +msgid "User selection" +msgstr "Benutzerauswahl" + +#: MSG_WARNING +msgid "Warning" +msgstr "Warnung" + +#: MSG_WELCOME +msgid "Welcome to Export Trakt 4 Letterboxd" +msgstr "Willkommen bei Export Trakt 4 Letterboxd" + +#: MSG_WORKING_DIRECTORY +msgid "Working directory" +msgstr "" + +#: MSG_YES +msgid "Yes" +msgstr "Ja" + +#: MSG_all_dependencies_installed +msgid "All required dependencies are installed." +msgstr "Alle erforderlichen Abhängigkeiten sind installiert." + diff --git a/exports/es.po b/exports/es.po new file mode 100644 index 0000000..3685d27 --- /dev/null +++ b/exports/es.po @@ -0,0 +1,530 @@ +# Translation for Export Trakt 4 Letterboxd +# Copyright (C) 2025 Export Trakt 4 Letterboxd +# This file is distributed under the same license as the Export Trakt 4 Letterboxd package. +# +msgid "" +msgstr "" +"Project-Id-Version: Export Trakt 4 Letterboxd 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2025-03-24 22:44+0100\n" +"PO-Revision-Date: 2025-03-24 22:44+0100\n" +"Last-Translator: Automatic export\n" +"Language-Team: es\n" +"Language: es\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: MSG_ABORT +msgid "Abort" +msgstr "Abortar" + +#: MSG_ACCESS_TOKEN_CHECK +msgid "Access token check" +msgstr "" + +#: MSG_ACCESS_TOKEN_FOUND +msgid "Access token found" +msgstr "" + +#: MSG_ACCESS_TOKEN_NOT_FOUND +msgid "Access token not found" +msgstr "" + +#: MSG_API_AUTH_FAILURE +msgid "Authentication failed" +msgstr "Autenticación fallida" + +#: MSG_API_AUTH_REQUIRED +msgid "Authentication required" +msgstr "Autenticación requerida" + +#: MSG_API_AUTH_SUCCESS +msgid "Authentication successful" +msgstr "Autenticación exitosa" + +#: MSG_API_ERROR +msgid "API error" +msgstr "Error API" + +#: MSG_API_KEY_CHECK +msgid "API key check" +msgstr "" + +#: MSG_API_KEY_FOUND +msgid "API key found" +msgstr "" + +#: MSG_API_KEY_NOT_FOUND +msgid "API key not found" +msgstr "" + +#: MSG_API_LIMIT +msgid "API limit reached" +msgstr "Límite API alcanzado" + +#: MSG_API_REQUEST +msgid "API request" +msgstr "Solicitud API" + +#: MSG_API_RESPONSE +msgid "API response" +msgstr "Respuesta API" + +#: MSG_API_RETRY +msgid "Retry" +msgstr "Reintento" + +#: MSG_API_SECRET_CHECK +msgid "API secret check" +msgstr "" + +#: MSG_API_SECRET_FOUND +msgid "API secret found" +msgstr "" + +#: MSG_API_SECRET_NOT_FOUND +msgid "API secret not found" +msgstr "" + +#: MSG_API_WAIT +msgid "Waiting before next request" +msgstr "Esperando antes de la próxima solicitud" + +#: MSG_AUTO_DETECTED +msgid "auto-detected" +msgstr "auto-detectado" + +#: MSG_BACKUP_DIRECTORY +msgid "Backup directory" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_EXISTS +msgid "Backup directory exists" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_NOT_WRITABLE +msgid "WARNING: Backup directory is not writable. Check permissions." 
+msgstr "" + +#: MSG_BACKUP_DIRECTORY_WRITABLE +msgid "Backup directory is writable" +msgstr "" + +#: MSG_CANCEL +msgid "Cancel" +msgstr "Cancelar" + +#: MSG_CHECKING_DEPENDENCIES +msgid "Checking required dependencies" +msgstr "" + +#: MSG_CONFIG_CREATED +msgid "Configuration file created" +msgstr "Archivo de configuración creado" + +#: MSG_CONFIG_DEFAULT +msgid "Default configuration" +msgstr "Configuración predeterminada" + +#: MSG_CONFIG_ERROR +msgid "Error in configuration file" +msgstr "Error en el archivo de configuración" + +#: MSG_CONFIG_LOADED +msgid "Configuration loaded" +msgstr "Configuración cargada" + +#: MSG_CONFIG_LOADING +msgid "Loading configuration" +msgstr "Cargando configuración" + +#: MSG_CONFIG_MISSING +msgid "Configuration missing" +msgstr "Configuración faltante" + +#: MSG_CONFIG_NOT_FOUND +msgid "Configuration file not found" +msgstr "Archivo de configuración no encontrado" + +#: MSG_CONFIG_SAVED +msgid "Configuration saved" +msgstr "Configuración guardada" + +#: MSG_CONFIG_SAVING +msgid "Saving configuration" +msgstr "Guardando configuración" + +#: MSG_CONFIG_UPDATED +msgid "Configuration updated" +msgstr "Configuración actualizada" + +#: MSG_CONFIRM +msgid "Confirm" +msgstr "Confirmar" + +#: MSG_CONTINUE +msgid "Continue" +msgstr "Continuar" + +#: MSG_COPY_DIRECTORY +msgid "Copy directory" +msgstr "" + +#: MSG_CREATED_BACKUP_DIRECTORY +msgid "Created backup directory" +msgstr "" + +#: MSG_DIRECTORY_CREATED +msgid "Directory created" +msgstr "Directorio creado" + +#: MSG_DIRECTORY_EXISTS +msgid "Directory exists" +msgstr "" + +#: MSG_DIRECTORY_NOT_FOUND +msgid "Directory not found" +msgstr "Directorio no encontrado" + +#: MSG_DIRECTORY_PERMISSIONS +msgid "Directory permissions" +msgstr "" + +#: MSG_DONE +msgid "Done" +msgstr "Hecho" + +#: MSG_ENVIRONMENT_INFO +msgid "Environment information" +msgstr "" + +#: MSG_ERROR +msgid "Error" +msgstr "Error" + +#: MSG_ERROR_MISSING_LANG_FILE +msgid "Error: Language file not found. Using English defaults." +msgstr "Error: Archivo de idioma no encontrado. Usando valores predeterminados en inglés." 
+ +#: MSG_EXISTING_CSV_CHECK +msgid "Existing CSV file check" +msgstr "" + +#: MSG_EXIT +msgid "Exit" +msgstr "Salir" + +#: MSG_EXPORT_COMPLETE +msgid "Export completed" +msgstr "Exportación completada" + +#: MSG_EXPORT_FAILED +msgid "Export process failed" +msgstr "Proceso de exportación fallido" + +#: MSG_EXPORT_FILE_CREATED +msgid "Export file created: %s" +msgstr "Archivo de exportación creado: %s" + +#: MSG_EXPORT_FINISHED +msgid "Export process completed" +msgstr "Proceso de exportación completado" + +#: MSG_EXPORT_FORMATTING +msgid "Formatting export data" +msgstr "Formateando datos de exportación" + +#: MSG_EXPORT_GENERATING +msgid "Generating export file" +msgstr "Generando archivo de exportación" + +#: MSG_EXPORT_NO_DATA +msgid "No data to export" +msgstr "No hay datos para exportar" + +#: MSG_EXPORT_PROCESSING +msgid "Processing export data" +msgstr "Procesando datos de exportación" + +#: MSG_EXPORT_SAVING +msgid "Saving export file" +msgstr "Guardando archivo de exportación" + +#: MSG_EXPORT_START +msgid "Starting export" +msgstr "Iniciando exportación" + +#: MSG_EXPORT_STARTING +msgid "Starting export process" +msgstr "Iniciando proceso de exportación" + +#: MSG_EXPORT_SUMMARY +msgid "Export summary" +msgstr "Resumen de exportación" + +#: MSG_FAILED +msgid "Failed" +msgstr "Fallido" + +#: MSG_FILE_CREATED +msgid "File created: %s" +msgstr "Archivo creado: %s" + +#: MSG_FILE_DELETED +msgid "File deleted: %s" +msgstr "Archivo eliminado: %s" + +#: MSG_FILE_EXISTS +msgid "File exists" +msgstr "" + +#: MSG_FILE_EXISTS_NOT +msgid "File not found" +msgstr "" + +#: MSG_FILE_HAS_CONTENT +msgid "File has content" +msgstr "" + +#: MSG_FILE_IS_READABLE +msgid "File is readable" +msgstr "" + +#: MSG_FILE_IS_WRITABLE +msgid "File is writable" +msgstr "" + +#: MSG_FILE_NOT_FOUND +msgid "File not found: %s" +msgstr "Archivo no encontrado: %s" + +#: MSG_FILE_PERMISSION_DENIED +msgid "Permission denied for file: %s" +msgstr "Permiso denegado para el archivo: %s" + +#: MSG_FILE_READ_ERROR +msgid "File read error" +msgstr "Error de lectura de archivo" + +#: MSG_FILE_UPDATED +msgid "File updated: %s" +msgstr "Archivo actualizado: %s" + +#: MSG_FILE_WRITE_ERROR +msgid "File write error" +msgstr "Error de escritura de archivo" + +#: MSG_GOODBYE +msgid "Goodbye" +msgstr "Adiós" + +#: MSG_HELLO +msgid "Hello" +msgstr "Hola" + +#: MSG_HELP +msgid "Help" +msgstr "Ayuda" + +#: MSG_INFO +msgid "Information" +msgstr "Información" + +#: MSG_INVALID_OPTION +msgid "Invalid option" +msgstr "Opción inválida" + +#: MSG_LANGUAGE_SET +msgid "Language set to" +msgstr "Idioma establecido a" + +#: MSG_LOG_DIRECTORY +msgid "Log directory" +msgstr "" + +#: MSG_MISSING_DEPENDENCIES +msgid "Some required dependencies are missing. Please install them before continuing." 
+msgstr "" + +#: MSG_NO +msgid "No" +msgstr "No" + +#: MSG_NONE +msgid "none" +msgstr "ninguna" + +#: MSG_NO_OPTION +msgid "No option provided, using default" +msgstr "" + +#: MSG_OS_TYPE +msgid "OS Type" +msgstr "" + +#: MSG_PLEASE_WAIT +msgid "Please wait" +msgstr "Por favor espere" + +#: MSG_PROCESSING +msgid "Processing" +msgstr "Procesando" + +#: MSG_REFRESH_TOKEN_CHECK +msgid "Refresh token check" +msgstr "" + +#: MSG_REFRESH_TOKEN_FOUND +msgid "Refresh token found" +msgstr "" + +#: MSG_REFRESH_TOKEN_NOT_FOUND +msgid "Refresh token not found" +msgstr "" + +#: MSG_RETRIEVING_INFO +msgid "Retrieving information" +msgstr "" + +#: MSG_RUNNING_DOCKER +msgid "Running in Docker container" +msgstr "Ejecutando en contenedor Docker" + +#: MSG_RUNNING_IN +msgid "running on" +msgstr "ejecutándose en" + +#: MSG_SCRIPT_COMPLETE +msgid "Script execution completed successfully" +msgstr "" + +#: MSG_SCRIPT_DIRECTORY +msgid "Script directory" +msgstr "" + +#: MSG_SCRIPT_ERROR +msgid "An error occurred while running the script" +msgstr "Se produjo un error durante la ejecución del script" + +#: MSG_SCRIPT_EXECUTION_END +msgid "Script execution ended" +msgstr "Ejecución del script finalizada" + +#: MSG_SCRIPT_EXECUTION_START +msgid "Script execution started" +msgstr "Ejecución del script iniciada" + +#: MSG_SCRIPT_FINISHED +msgid "Export Trakt 4 Letterboxd script finished" +msgstr "Script Export Trakt 4 Letterboxd finalizado" + +#: MSG_SCRIPT_INTERRUPTED +msgid "Script interrupted by user" +msgstr "Script interrumpido por el usuario" + +#: MSG_SCRIPT_OPTION +msgid "Script option" +msgstr "Opción del script" + +#: MSG_SCRIPT_STARTING +msgid "Starting Export Trakt 4 Letterboxd script" +msgstr "Iniciando script Export Trakt 4 Letterboxd" + +#: MSG_STARTING +msgid "Starting" +msgstr "Iniciando" + +#: MSG_SUCCESS +msgid "Success" +msgstr "Éxito" + +#: MSG_TRAKT_API_ERROR +msgid "Error connecting to Trakt API" +msgstr "Error al conectar con la API de Trakt" + +#: MSG_TRAKT_API_RATE_LIMIT +msgid "Trakt API rate limit reached, waiting..." +msgstr "Límite de tasa de la API de Trakt alcanzado, esperando..." + +#: MSG_TRAKT_AUTH_FAILED +msgid "Trakt authentication failed" +msgstr "Autenticación de Trakt fallida" + +#: MSG_TRAKT_AUTH_REQUIRED +msgid "Trakt authentication required" +msgstr "Se requiere autenticación de Trakt" + +#: MSG_TRAKT_AUTH_SUCCESS +msgid "Trakt authentication successful" +msgstr "Autenticación de Trakt exitosa" + +#: MSG_TRANSLATION_ERROR +msgid "Translation error" +msgstr "Error de traducción" + +#: MSG_TRANSLATION_LOADED +msgid "Translation loaded" +msgstr "Traducción cargada" + +#: MSG_TRANSLATION_MISSING +msgid "Translation missing" +msgstr "Traducción faltante" + +#: MSG_TRANSLATION_UPDATED +msgid "Translation updated" +msgstr "Traducción actualizada" + +#: MSG_USER +msgid "User" +msgstr "" + +#: MSG_USER_CONFIRM +msgid "Do you want to continue? (y/N)" +msgstr "¿Desea continuar? 
(s/N)" + +#: MSG_USER_CONFIRMATION +msgid "User confirmation" +msgstr "Confirmación del usuario" + +#: MSG_USER_INPUT +msgid "User input" +msgstr "Entrada del usuario" + +#: MSG_USER_INPUT_REQUIRED +msgid "Please provide input" +msgstr "Por favor, proporcione una entrada" + +#: MSG_USER_INVALID_INPUT +msgid "Invalid input, please try again" +msgstr "Entrada inválida, por favor intente de nuevo" + +#: MSG_USER_PROMPT +msgid "User prompt" +msgstr "Solicitud al usuario" + +#: MSG_USER_SELECTION +msgid "User selection" +msgstr "Selección del usuario" + +#: MSG_WARNING +msgid "Warning" +msgstr "Advertencia" + +#: MSG_WELCOME +msgid "Welcome to Export Trakt 4 Letterboxd" +msgstr "Bienvenido a Export Trakt 4 Letterboxd" + +#: MSG_WORKING_DIRECTORY +msgid "Working directory" +msgstr "" + +#: MSG_YES +msgid "Yes" +msgstr "Sí" + +#: MSG_all_dependencies_installed +msgid "All required dependencies are installed." +msgstr "Todas las dependencias requeridas están instaladas." + diff --git a/exports/fr.po b/exports/fr.po new file mode 100644 index 0000000..bfa6c71 --- /dev/null +++ b/exports/fr.po @@ -0,0 +1,530 @@ +# Translation for Export Trakt 4 Letterboxd +# Copyright (C) 2025 Export Trakt 4 Letterboxd +# This file is distributed under the same license as the Export Trakt 4 Letterboxd package. +# +msgid "" +msgstr "" +"Project-Id-Version: Export Trakt 4 Letterboxd 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2025-03-24 22:44+0100\n" +"PO-Revision-Date: 2025-03-24 22:44+0100\n" +"Last-Translator: Automatic export\n" +"Language-Team: fr\n" +"Language: fr\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: MSG_ABORT +msgid "Abort" +msgstr "ABANDON" + +#: MSG_ACCESS_TOKEN_CHECK +msgid "Access token check" +msgstr "" + +#: MSG_ACCESS_TOKEN_FOUND +msgid "Access token found" +msgstr "" + +#: MSG_ACCESS_TOKEN_NOT_FOUND +msgid "Access token not found" +msgstr "" + +#: MSG_API_AUTH_FAILURE +msgid "Authentication failed" +msgstr "Échec d'authentification" + +#: MSG_API_AUTH_REQUIRED +msgid "Authentication required" +msgstr "Authentification requise" + +#: MSG_API_AUTH_SUCCESS +msgid "Authentication successful" +msgstr "Authentification réussie" + +#: MSG_API_ERROR +msgid "API error" +msgstr "Erreur API" + +#: MSG_API_KEY_CHECK +msgid "API key check" +msgstr "" + +#: MSG_API_KEY_FOUND +msgid "API key found" +msgstr "" + +#: MSG_API_KEY_NOT_FOUND +msgid "API key not found" +msgstr "" + +#: MSG_API_LIMIT +msgid "API limit reached" +msgstr "Limite d'API atteinte" + +#: MSG_API_REQUEST +msgid "API request" +msgstr "Requête API" + +#: MSG_API_RESPONSE +msgid "API response" +msgstr "Réponse API" + +#: MSG_API_RETRY +msgid "Retry" +msgstr "Nouvelle tentative" + +#: MSG_API_SECRET_CHECK +msgid "API secret check" +msgstr "" + +#: MSG_API_SECRET_FOUND +msgid "API secret found" +msgstr "" + +#: MSG_API_SECRET_NOT_FOUND +msgid "API secret not found" +msgstr "" + +#: MSG_API_WAIT +msgid "Waiting before next request" +msgstr "Attente avant la prochaine requête" + +#: MSG_AUTO_DETECTED +msgid "auto-detected" +msgstr "détection automatique" + +#: MSG_BACKUP_DIRECTORY +msgid "Backup directory" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_EXISTS +msgid "Backup directory exists" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_NOT_WRITABLE +msgid "WARNING: Backup directory is not writable. Check permissions." 
+msgstr "" + +#: MSG_BACKUP_DIRECTORY_WRITABLE +msgid "Backup directory is writable" +msgstr "" + +#: MSG_CANCEL +msgid "Cancel" +msgstr "ANNULER" + +#: MSG_CHECKING_DEPENDENCIES +msgid "Checking required dependencies" +msgstr "" + +#: MSG_CONFIG_CREATED +msgid "Configuration file created" +msgstr "Fichier de configuration créé" + +#: MSG_CONFIG_DEFAULT +msgid "Default configuration" +msgstr "Configuration par défaut" + +#: MSG_CONFIG_ERROR +msgid "Error in configuration file" +msgstr "Erreur dans le fichier de configuration" + +#: MSG_CONFIG_LOADED +msgid "Configuration loaded" +msgstr "Configuration chargée" + +#: MSG_CONFIG_LOADING +msgid "Loading configuration" +msgstr "Chargement de la configuration" + +#: MSG_CONFIG_MISSING +msgid "Configuration missing" +msgstr "Configuration manquante" + +#: MSG_CONFIG_NOT_FOUND +msgid "Configuration file not found" +msgstr "Fichier de configuration introuvable" + +#: MSG_CONFIG_SAVED +msgid "Configuration saved" +msgstr "Configuration enregistrée" + +#: MSG_CONFIG_SAVING +msgid "Saving configuration" +msgstr "Enregistrement de la configuration" + +#: MSG_CONFIG_UPDATED +msgid "Configuration updated" +msgstr "Configuration mise à jour" + +#: MSG_CONFIRM +msgid "Confirm" +msgstr "CONFIRMER" + +#: MSG_CONTINUE +msgid "Continue" +msgstr "CONTINUER" + +#: MSG_COPY_DIRECTORY +msgid "Copy directory" +msgstr "" + +#: MSG_CREATED_BACKUP_DIRECTORY +msgid "Created backup directory" +msgstr "" + +#: MSG_DIRECTORY_CREATED +msgid "Directory created" +msgstr "Répertoire créé" + +#: MSG_DIRECTORY_EXISTS +msgid "Directory exists" +msgstr "" + +#: MSG_DIRECTORY_NOT_FOUND +msgid "Directory not found" +msgstr "Répertoire non trouvé" + +#: MSG_DIRECTORY_PERMISSIONS +msgid "Directory permissions" +msgstr "" + +#: MSG_DONE +msgid "Done" +msgstr "TERMINÉ" + +#: MSG_ENVIRONMENT_INFO +msgid "Environment information" +msgstr "" + +#: MSG_ERROR +msgid "Error" +msgstr "ERREUR" + +#: MSG_ERROR_MISSING_LANG_FILE +msgid "Error: Language file not found. Using English defaults." +msgstr "Erreur : Fichier de langue introuvable. Utilisation des valeurs par défaut en anglais." 
+ +#: MSG_EXISTING_CSV_CHECK +msgid "Existing CSV file check" +msgstr "" + +#: MSG_EXIT +msgid "Exit" +msgstr "QUITTER" + +#: MSG_EXPORT_COMPLETE +msgid "Export completed" +msgstr "Exportation terminée" + +#: MSG_EXPORT_FAILED +msgid "Export process failed" +msgstr "Échec du processus d'exportation" + +#: MSG_EXPORT_FILE_CREATED +msgid "Export file created: %s" +msgstr "Fichier d'export créé : %s" + +#: MSG_EXPORT_FINISHED +msgid "Export process completed" +msgstr "Processus d'exportation terminé" + +#: MSG_EXPORT_FORMATTING +msgid "Formatting export data" +msgstr "Formatage des données d'exportation" + +#: MSG_EXPORT_GENERATING +msgid "Generating export file" +msgstr "Génération du fichier d'exportation" + +#: MSG_EXPORT_NO_DATA +msgid "No data to export" +msgstr "Aucune donnée à exporter" + +#: MSG_EXPORT_PROCESSING +msgid "Processing export data" +msgstr "Traitement des données d'exportation" + +#: MSG_EXPORT_SAVING +msgid "Saving export file" +msgstr "Enregistrement du fichier d'exportation" + +#: MSG_EXPORT_START +msgid "Starting export" +msgstr "Début de l'exportation" + +#: MSG_EXPORT_STARTING +msgid "Starting export process" +msgstr "Démarrage du processus d'exportation" + +#: MSG_EXPORT_SUMMARY +msgid "Export summary" +msgstr "Résumé de l'exportation" + +#: MSG_FAILED +msgid "Failed" +msgstr "Échec" + +#: MSG_FILE_CREATED +msgid "File created: %s" +msgstr "Fichier créé : %s" + +#: MSG_FILE_DELETED +msgid "File deleted: %s" +msgstr "Fichier supprimé : %s" + +#: MSG_FILE_EXISTS +msgid "File exists" +msgstr "" + +#: MSG_FILE_EXISTS_NOT +msgid "File not found" +msgstr "" + +#: MSG_FILE_HAS_CONTENT +msgid "File has content" +msgstr "" + +#: MSG_FILE_IS_READABLE +msgid "File is readable" +msgstr "" + +#: MSG_FILE_IS_WRITABLE +msgid "File is writable" +msgstr "" + +#: MSG_FILE_NOT_FOUND +msgid "File not found: %s" +msgstr "Fichier introuvable : %s" + +#: MSG_FILE_PERMISSION_DENIED +msgid "Permission denied for file: %s" +msgstr "Permission refusée pour le fichier : %s" + +#: MSG_FILE_READ_ERROR +msgid "File read error" +msgstr "Erreur de lecture du fichier" + +#: MSG_FILE_UPDATED +msgid "File updated: %s" +msgstr "Fichier mis à jour : %s" + +#: MSG_FILE_WRITE_ERROR +msgid "File write error" +msgstr "Erreur d'écriture du fichier" + +#: MSG_GOODBYE +msgid "Goodbye" +msgstr "Au revoir" + +#: MSG_HELLO +msgid "Hello" +msgstr "Bonjour" + +#: MSG_HELP +msgid "Help" +msgstr "AIDE" + +#: MSG_INFO +msgid "Information" +msgstr "INFO" + +#: MSG_INVALID_OPTION +msgid "Invalid option" +msgstr "Option invalide" + +#: MSG_LANGUAGE_SET +msgid "Language set to" +msgstr "Langue définie" + +#: MSG_LOG_DIRECTORY +msgid "Log directory" +msgstr "" + +#: MSG_MISSING_DEPENDENCIES +msgid "Some required dependencies are missing. Please install them before continuing." 
+msgstr "" + +#: MSG_NO +msgid "No" +msgstr "NON" + +#: MSG_NONE +msgid "none" +msgstr "aucune" + +#: MSG_NO_OPTION +msgid "No option provided, using default" +msgstr "" + +#: MSG_OS_TYPE +msgid "OS Type" +msgstr "" + +#: MSG_PLEASE_WAIT +msgid "Please wait" +msgstr "Veuillez patienter" + +#: MSG_PROCESSING +msgid "Processing" +msgstr "Traitement en cours" + +#: MSG_REFRESH_TOKEN_CHECK +msgid "Refresh token check" +msgstr "" + +#: MSG_REFRESH_TOKEN_FOUND +msgid "Refresh token found" +msgstr "" + +#: MSG_REFRESH_TOKEN_NOT_FOUND +msgid "Refresh token not found" +msgstr "" + +#: MSG_RETRIEVING_INFO +msgid "Retrieving information" +msgstr "" + +#: MSG_RUNNING_DOCKER +msgid "Running in Docker container" +msgstr "Exécution dans un conteneur Docker" + +#: MSG_RUNNING_IN +msgid "running on" +msgstr "exécuté sur" + +#: MSG_SCRIPT_COMPLETE +msgid "Script execution completed successfully" +msgstr "" + +#: MSG_SCRIPT_DIRECTORY +msgid "Script directory" +msgstr "" + +#: MSG_SCRIPT_ERROR +msgid "An error occurred while running the script" +msgstr "Une erreur s'est produite lors de l'exécution du script" + +#: MSG_SCRIPT_EXECUTION_END +msgid "Script execution ended" +msgstr "Fin de l'exécution du script" + +#: MSG_SCRIPT_EXECUTION_START +msgid "Script execution started" +msgstr "Démarrage de l'exécution du script" + +#: MSG_SCRIPT_FINISHED +msgid "Export Trakt 4 Letterboxd script finished" +msgstr "Script Export Trakt 4 Letterboxd terminé" + +#: MSG_SCRIPT_INTERRUPTED +msgid "Script interrupted by user" +msgstr "Script interrompu par l'utilisateur" + +#: MSG_SCRIPT_OPTION +msgid "Script option" +msgstr "Option du script" + +#: MSG_SCRIPT_STARTING +msgid "Starting Export Trakt 4 Letterboxd script" +msgstr "Démarrage du script Export Trakt 4 Letterboxd" + +#: MSG_STARTING +msgid "Starting" +msgstr "Démarrage en cours" + +#: MSG_SUCCESS +msgid "Success" +msgstr "SUCCÈS" + +#: MSG_TRAKT_API_ERROR +msgid "Error connecting to Trakt API" +msgstr "Erreur de connexion à l'API Trakt" + +#: MSG_TRAKT_API_RATE_LIMIT +msgid "Trakt API rate limit reached, waiting..." +msgstr "Limite de débit de l'API Trakt atteinte, en attente..." + +#: MSG_TRAKT_AUTH_FAILED +msgid "Trakt authentication failed" +msgstr "Échec de l'authentification Trakt" + +#: MSG_TRAKT_AUTH_REQUIRED +msgid "Trakt authentication required" +msgstr "Authentification Trakt requise" + +#: MSG_TRAKT_AUTH_SUCCESS +msgid "Trakt authentication successful" +msgstr "Authentification Trakt réussie" + +#: MSG_TRANSLATION_ERROR +msgid "Translation error" +msgstr "Erreur de traduction" + +#: MSG_TRANSLATION_LOADED +msgid "Translation loaded" +msgstr "Traduction chargée" + +#: MSG_TRANSLATION_MISSING +msgid "Translation missing" +msgstr "Traduction manquante" + +#: MSG_TRANSLATION_UPDATED +msgid "Translation updated" +msgstr "Traduction mise à jour" + +#: MSG_USER +msgid "User" +msgstr "" + +#: MSG_USER_CONFIRM +msgid "Do you want to continue? (y/N)" +msgstr "Voulez-vous continuer ? 
(o/N)" + +#: MSG_USER_CONFIRMATION +msgid "User confirmation" +msgstr "Confirmation utilisateur" + +#: MSG_USER_INPUT +msgid "User input" +msgstr "Entrée utilisateur" + +#: MSG_USER_INPUT_REQUIRED +msgid "Please provide input" +msgstr "Veuillez fournir une entrée" + +#: MSG_USER_INVALID_INPUT +msgid "Invalid input, please try again" +msgstr "Entrée invalide, veuillez réessayer" + +#: MSG_USER_PROMPT +msgid "User prompt" +msgstr "Invite utilisateur" + +#: MSG_USER_SELECTION +msgid "User selection" +msgstr "Sélection utilisateur" + +#: MSG_WARNING +msgid "Warning" +msgstr "AVERTISSEMENT" + +#: MSG_WELCOME +msgid "Welcome to Export Trakt 4 Letterboxd" +msgstr "Bienvenue dans Export_Trakt_4_Letterboxd" + +#: MSG_WORKING_DIRECTORY +msgid "Working directory" +msgstr "" + +#: MSG_YES +msgid "Yes" +msgstr "OUI" + +#: MSG_all_dependencies_installed +msgid "All required dependencies are installed." +msgstr "Toutes les dépendances requises sont installées." + diff --git a/exports/it.po b/exports/it.po new file mode 100644 index 0000000..29b2587 --- /dev/null +++ b/exports/it.po @@ -0,0 +1,530 @@ +# Translation for Export Trakt 4 Letterboxd +# Copyright (C) 2025 Export Trakt 4 Letterboxd +# This file is distributed under the same license as the Export Trakt 4 Letterboxd package. +# +msgid "" +msgstr "" +"Project-Id-Version: Export Trakt 4 Letterboxd 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2025-03-24 22:44+0100\n" +"PO-Revision-Date: 2025-03-24 22:44+0100\n" +"Last-Translator: Automatic export\n" +"Language-Team: it\n" +"Language: it\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: MSG_ABORT +msgid "Abort" +msgstr "INTERROTTO" + +#: MSG_ACCESS_TOKEN_CHECK +msgid "Access token check" +msgstr "" + +#: MSG_ACCESS_TOKEN_FOUND +msgid "Access token found" +msgstr "" + +#: MSG_ACCESS_TOKEN_NOT_FOUND +msgid "Access token not found" +msgstr "" + +#: MSG_API_AUTH_FAILURE +msgid "Authentication failed" +msgstr "Autenticazione fallita" + +#: MSG_API_AUTH_REQUIRED +msgid "Authentication required" +msgstr "Autenticazione richiesta" + +#: MSG_API_AUTH_SUCCESS +msgid "Authentication successful" +msgstr "Autenticazione riuscita" + +#: MSG_API_ERROR +msgid "API error" +msgstr "Errore API" + +#: MSG_API_KEY_CHECK +msgid "API key check" +msgstr "" + +#: MSG_API_KEY_FOUND +msgid "API key found" +msgstr "" + +#: MSG_API_KEY_NOT_FOUND +msgid "API key not found" +msgstr "" + +#: MSG_API_LIMIT +msgid "API limit reached" +msgstr "Limite API raggiunto" + +#: MSG_API_REQUEST +msgid "API request" +msgstr "Richiesta API" + +#: MSG_API_RESPONSE +msgid "API response" +msgstr "Risposta API" + +#: MSG_API_RETRY +msgid "Retry" +msgstr "Nuovo tentativo" + +#: MSG_API_SECRET_CHECK +msgid "API secret check" +msgstr "" + +#: MSG_API_SECRET_FOUND +msgid "API secret found" +msgstr "" + +#: MSG_API_SECRET_NOT_FOUND +msgid "API secret not found" +msgstr "" + +#: MSG_API_WAIT +msgid "Waiting before next request" +msgstr "Attesa prima della prossima richiesta" + +#: MSG_AUTO_DETECTED +msgid "auto-detected" +msgstr "rilevamento automatico" + +#: MSG_BACKUP_DIRECTORY +msgid "Backup directory" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_EXISTS +msgid "Backup directory exists" +msgstr "" + +#: MSG_BACKUP_DIRECTORY_NOT_WRITABLE +msgid "WARNING: Backup directory is not writable. Check permissions." 
+msgstr "" + +#: MSG_BACKUP_DIRECTORY_WRITABLE +msgid "Backup directory is writable" +msgstr "" + +#: MSG_CANCEL +msgid "Cancel" +msgstr "ANNULLA" + +#: MSG_CHECKING_DEPENDENCIES +msgid "Checking required dependencies" +msgstr "" + +#: MSG_CONFIG_CREATED +msgid "Configuration file created" +msgstr "File di configurazione creato" + +#: MSG_CONFIG_DEFAULT +msgid "Default configuration" +msgstr "Configurazione predefinita" + +#: MSG_CONFIG_ERROR +msgid "Error in configuration file" +msgstr "Errore nel file di configurazione" + +#: MSG_CONFIG_LOADED +msgid "Configuration loaded" +msgstr "Configurazione caricata" + +#: MSG_CONFIG_LOADING +msgid "Loading configuration" +msgstr "Caricamento della configurazione" + +#: MSG_CONFIG_MISSING +msgid "Configuration missing" +msgstr "Configurazione mancante" + +#: MSG_CONFIG_NOT_FOUND +msgid "Configuration file not found" +msgstr "File di configurazione non trovato" + +#: MSG_CONFIG_SAVED +msgid "Configuration saved" +msgstr "Configurazione salvata" + +#: MSG_CONFIG_SAVING +msgid "Saving configuration" +msgstr "Salvataggio della configurazione" + +#: MSG_CONFIG_UPDATED +msgid "Configuration updated" +msgstr "Configurazione aggiornata" + +#: MSG_CONFIRM +msgid "Confirm" +msgstr "CONFERMA" + +#: MSG_CONTINUE +msgid "Continue" +msgstr "CONTINUA" + +#: MSG_COPY_DIRECTORY +msgid "Copy directory" +msgstr "" + +#: MSG_CREATED_BACKUP_DIRECTORY +msgid "Created backup directory" +msgstr "" + +#: MSG_DIRECTORY_CREATED +msgid "Directory created" +msgstr "Directory creata" + +#: MSG_DIRECTORY_EXISTS +msgid "Directory exists" +msgstr "" + +#: MSG_DIRECTORY_NOT_FOUND +msgid "Directory not found" +msgstr "Directory non trovata" + +#: MSG_DIRECTORY_PERMISSIONS +msgid "Directory permissions" +msgstr "" + +#: MSG_DONE +msgid "Done" +msgstr "COMPLETATO" + +#: MSG_ENVIRONMENT_INFO +msgid "Environment information" +msgstr "" + +#: MSG_ERROR +msgid "Error" +msgstr "ERRORE" + +#: MSG_ERROR_MISSING_LANG_FILE +msgid "Error: Language file not found. Using English defaults." +msgstr "Errore: File di lingua non trovato. Utilizzo dei valori predefiniti in inglese." 
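Note: as in the French catalog, many msgstr values in this Italian catalog are intentionally left empty so the English msgid is used as a fallback (cf. MSG_ERROR_MISSING_LANG_FILE above). If GNU gettext happens to be installed on the machine doing maintenance, the snippet below is one way to count how many strings remain untranslated per catalog; this is only a hedged suggestion, not something this patch or the scripts require.

# Assumes GNU gettext's msgfmt is available; --statistics prints translated/untranslated
# counts (to stderr), and -o /dev/null discards the compiled .mo file we do not need.
for po in exports/fr.po exports/it.po; do
  printf '%s: ' "$po"
  msgfmt --statistics -o /dev/null "$po"
done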
+ +#: MSG_EXISTING_CSV_CHECK +msgid "Existing CSV file check" +msgstr "" + +#: MSG_EXIT +msgid "Exit" +msgstr "ESCI" + +#: MSG_EXPORT_COMPLETE +msgid "Export completed" +msgstr "Esportazione completata" + +#: MSG_EXPORT_FAILED +msgid "Export process failed" +msgstr "Processo di esportazione fallito" + +#: MSG_EXPORT_FILE_CREATED +msgid "Export file created: %s" +msgstr "File di esportazione creato: %s" + +#: MSG_EXPORT_FINISHED +msgid "Export process completed" +msgstr "Processo di esportazione completato" + +#: MSG_EXPORT_FORMATTING +msgid "Formatting export data" +msgstr "Formattazione dei dati di esportazione" + +#: MSG_EXPORT_GENERATING +msgid "Generating export file" +msgstr "Generazione del file di esportazione" + +#: MSG_EXPORT_NO_DATA +msgid "No data to export" +msgstr "Nessun dato da esportare" + +#: MSG_EXPORT_PROCESSING +msgid "Processing export data" +msgstr "Elaborazione dei dati di esportazione" + +#: MSG_EXPORT_SAVING +msgid "Saving export file" +msgstr "Salvataggio del file di esportazione" + +#: MSG_EXPORT_START +msgid "Starting export" +msgstr "Inizio dell'esportazione" + +#: MSG_EXPORT_STARTING +msgid "Starting export process" +msgstr "Avvio del processo di esportazione" + +#: MSG_EXPORT_SUMMARY +msgid "Export summary" +msgstr "Riepilogo dell'esportazione" + +#: MSG_FAILED +msgid "Failed" +msgstr "Fallito" + +#: MSG_FILE_CREATED +msgid "File created: %s" +msgstr "File creato: %s" + +#: MSG_FILE_DELETED +msgid "File deleted: %s" +msgstr "File eliminato: %s" + +#: MSG_FILE_EXISTS +msgid "File exists" +msgstr "" + +#: MSG_FILE_EXISTS_NOT +msgid "File not found" +msgstr "" + +#: MSG_FILE_HAS_CONTENT +msgid "File has content" +msgstr "" + +#: MSG_FILE_IS_READABLE +msgid "File is readable" +msgstr "" + +#: MSG_FILE_IS_WRITABLE +msgid "File is writable" +msgstr "" + +#: MSG_FILE_NOT_FOUND +msgid "File not found: %s" +msgstr "File non trovato: %s" + +#: MSG_FILE_PERMISSION_DENIED +msgid "Permission denied for file: %s" +msgstr "Permesso negato per il file: %s" + +#: MSG_FILE_READ_ERROR +msgid "File read error" +msgstr "Errore di lettura del file" + +#: MSG_FILE_UPDATED +msgid "File updated: %s" +msgstr "File aggiornato: %s" + +#: MSG_FILE_WRITE_ERROR +msgid "File write error" +msgstr "Errore di scrittura del file" + +#: MSG_GOODBYE +msgid "Goodbye" +msgstr "Arrivederci" + +#: MSG_HELLO +msgid "Hello" +msgstr "Ciao" + +#: MSG_HELP +msgid "Help" +msgstr "AIUTO" + +#: MSG_INFO +msgid "Information" +msgstr "INFO" + +#: MSG_INVALID_OPTION +msgid "Invalid option" +msgstr "Opzione non valida" + +#: MSG_LANGUAGE_SET +msgid "Language set to" +msgstr "Lingua impostata" + +#: MSG_LOG_DIRECTORY +msgid "Log directory" +msgstr "" + +#: MSG_MISSING_DEPENDENCIES +msgid "Some required dependencies are missing. Please install them before continuing." 
+msgstr "" + +#: MSG_NO +msgid "No" +msgstr "NO" + +#: MSG_NONE +msgid "none" +msgstr "nessuna" + +#: MSG_NO_OPTION +msgid "No option provided, using default" +msgstr "" + +#: MSG_OS_TYPE +msgid "OS Type" +msgstr "" + +#: MSG_PLEASE_WAIT +msgid "Please wait" +msgstr "Attendere prego" + +#: MSG_PROCESSING +msgid "Processing" +msgstr "Elaborazione in corso" + +#: MSG_REFRESH_TOKEN_CHECK +msgid "Refresh token check" +msgstr "" + +#: MSG_REFRESH_TOKEN_FOUND +msgid "Refresh token found" +msgstr "" + +#: MSG_REFRESH_TOKEN_NOT_FOUND +msgid "Refresh token not found" +msgstr "" + +#: MSG_RETRIEVING_INFO +msgid "Retrieving information" +msgstr "" + +#: MSG_RUNNING_DOCKER +msgid "Running in Docker container" +msgstr "In esecuzione in un contenitore Docker" + +#: MSG_RUNNING_IN +msgid "running on" +msgstr "in esecuzione su" + +#: MSG_SCRIPT_COMPLETE +msgid "Script execution completed successfully" +msgstr "" + +#: MSG_SCRIPT_DIRECTORY +msgid "Script directory" +msgstr "" + +#: MSG_SCRIPT_ERROR +msgid "An error occurred while running the script" +msgstr "Si è verificato un errore durante l'esecuzione dello script" + +#: MSG_SCRIPT_EXECUTION_END +msgid "Script execution ended" +msgstr "Fine dell'esecuzione dello script" + +#: MSG_SCRIPT_EXECUTION_START +msgid "Script execution started" +msgstr "Inizio dell'esecuzione dello script" + +#: MSG_SCRIPT_FINISHED +msgid "Export Trakt 4 Letterboxd script finished" +msgstr "Script Export Trakt 4 Letterboxd terminato" + +#: MSG_SCRIPT_INTERRUPTED +msgid "Script interrupted by user" +msgstr "Script interrotto dall'utente" + +#: MSG_SCRIPT_OPTION +msgid "Script option" +msgstr "Opzione dello script" + +#: MSG_SCRIPT_STARTING +msgid "Starting Export Trakt 4 Letterboxd script" +msgstr "Avvio dello script Export Trakt 4 Letterboxd" + +#: MSG_STARTING +msgid "Starting" +msgstr "Avvio in corso" + +#: MSG_SUCCESS +msgid "Success" +msgstr "SUCCESSO" + +#: MSG_TRAKT_API_ERROR +msgid "Error connecting to Trakt API" +msgstr "Errore di connessione all'API Trakt" + +#: MSG_TRAKT_API_RATE_LIMIT +msgid "Trakt API rate limit reached, waiting..." +msgstr "Limite di velocità dell'API Trakt raggiunto, in attesa..." + +#: MSG_TRAKT_AUTH_FAILED +msgid "Trakt authentication failed" +msgstr "Autenticazione Trakt fallita" + +#: MSG_TRAKT_AUTH_REQUIRED +msgid "Trakt authentication required" +msgstr "Autenticazione Trakt richiesta" + +#: MSG_TRAKT_AUTH_SUCCESS +msgid "Trakt authentication successful" +msgstr "Autenticazione Trakt riuscita" + +#: MSG_TRANSLATION_ERROR +msgid "Translation error" +msgstr "Errore di traduzione" + +#: MSG_TRANSLATION_LOADED +msgid "Translation loaded" +msgstr "Traduzione caricata" + +#: MSG_TRANSLATION_MISSING +msgid "Translation missing" +msgstr "Traduzione mancante" + +#: MSG_TRANSLATION_UPDATED +msgid "Translation updated" +msgstr "Traduzione aggiornata" + +#: MSG_USER +msgid "User" +msgstr "" + +#: MSG_USER_CONFIRM +msgid "Do you want to continue? (y/N)" +msgstr "Vuoi continuare? 
(s/N)" + +#: MSG_USER_CONFIRMATION +msgid "User confirmation" +msgstr "Conferma utente" + +#: MSG_USER_INPUT +msgid "User input" +msgstr "Input utente" + +#: MSG_USER_INPUT_REQUIRED +msgid "Please provide input" +msgstr "Si prega di fornire un input" + +#: MSG_USER_INVALID_INPUT +msgid "Invalid input, please try again" +msgstr "Input non valido, riprova" + +#: MSG_USER_PROMPT +msgid "User prompt" +msgstr "Prompt utente" + +#: MSG_USER_SELECTION +msgid "User selection" +msgstr "Selezione utente" + +#: MSG_WARNING +msgid "Warning" +msgstr "AVVISO" + +#: MSG_WELCOME +msgid "Welcome to Export Trakt 4 Letterboxd" +msgstr "Benvenuto in Export Trakt 4 Letterboxd" + +#: MSG_WORKING_DIRECTORY +msgid "Working directory" +msgstr "" + +#: MSG_YES +msgid "Yes" +msgstr "SÌ" + +#: MSG_all_dependencies_installed +msgid "All required dependencies are installed." +msgstr "All required dependencies are installed." + diff --git a/go.mod b/go.mod deleted file mode 100644 index 9921cd2..0000000 --- a/go.mod +++ /dev/null @@ -1,19 +0,0 @@ -module github.com/JohanDevl/Export_Trakt_4_Letterboxd - -go 1.22 - -require ( - github.com/BurntSushi/toml v1.3.2 - github.com/nicksnyder/go-i18n/v2 v2.4.0 - github.com/robfig/cron/v3 v3.0.1 - github.com/sirupsen/logrus v1.9.3 - github.com/stretchr/testify v1.7.0 - golang.org/x/text v0.14.0 -) - -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - golang.org/x/sys v0.5.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect -) diff --git a/go.sum b/go.sum deleted file mode 100644 index 1003afb..0000000 --- a/go.sum +++ /dev/null @@ -1,27 +0,0 @@ -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/nicksnyder/go-i18n/v2 v2.4.0 h1:3IcvPOAvnCKwNm0TB0dLDTuawWEj+ax/RERNC+diLMM= -github.com/nicksnyder/go-i18n/v2 v2.4.0/go.mod h1:nxYSZE9M0bf3Y70gPQjN9ha7XNHX7gMc814+6wVyEI4= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= -github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 
h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/install.sh b/install.sh new file mode 100755 index 0000000..2e7b737 --- /dev/null +++ b/install.sh @@ -0,0 +1,131 @@ +#!/bin/bash +# +# Installation script for Export Trakt 4 Letterboxd +# + +# Color codes +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' +BOLD='\033[1m' + +# Get script directory (resolving symlinks) +SCRIPT_DIR="$( cd "$( dirname "$(readlink -f "${BASH_SOURCE[0]}" 2>/dev/null || echo "${BASH_SOURCE[0]}")" )" && pwd )" + +echo -e "${BLUE}${BOLD}==================================================${NC}" +echo -e "${GREEN}${BOLD} Export Trakt 4 Letterboxd - Installation Script ${NC}" +echo -e "${BLUE}${BOLD}==================================================${NC}" +echo + +# Create essential directories +echo -e "${YELLOW}Creating required directories...${NC}" +directories=("lib" "config" "logs" "backup" "TEMP" "copy") + +for dir in "${directories[@]}"; do + if [ ! -d "${SCRIPT_DIR}/${dir}" ]; then + mkdir -p "${SCRIPT_DIR}/${dir}" + echo -e " - Created directory: ${CYAN}${dir}${NC}" + else + echo -e " - Directory already exists: ${CYAN}${dir}${NC}" + fi +done +echo -e "${GREEN}✓ Directories setup complete${NC}" +echo + +# Check for required dependencies +echo -e "${YELLOW}Checking required dependencies...${NC}" +dependencies=("curl" "jq" "sed" "awk") +missing_deps=() + +for cmd in "${dependencies[@]}"; do + if ! command -v "$cmd" &> /dev/null; then + echo -e " - ${RED}✗ $cmd not found${NC}" + missing_deps+=("$cmd") + else + echo -e " - ${GREEN}✓ $cmd found: $(command -v "$cmd")${NC}" + fi +done + +if [ ${#missing_deps[@]} -gt 0 ]; then + echo -e "\n${RED}${BOLD}Missing dependencies:${NC} ${missing_deps[*]}" + echo -e "${YELLOW}Please install the missing dependencies before continuing.${NC}" + + if [[ "$OSTYPE" == "darwin"* ]]; then + echo -e "\n${CYAN}On macOS, you can install them with:${NC}" + echo " brew install ${missing_deps[*]}" + elif [[ -f /etc/debian_version ]]; then + echo -e "\n${CYAN}On Debian/Ubuntu, you can install them with:${NC}" + echo " sudo apt update && sudo apt install ${missing_deps[*]}" + elif [[ -f /etc/fedora-release ]]; then + echo -e "\n${CYAN}On Fedora, you can install them with:${NC}" + echo " sudo dnf install ${missing_deps[*]}" + fi + + exit 1 +fi +echo -e "${GREEN}✓ All dependencies are installed${NC}" +echo + +# Check if config file exists, create from example if it doesn't +echo -e "${YELLOW}Setting up configuration...${NC}" +if [ ! -f "${SCRIPT_DIR}/config/.config.cfg" ]; then + if [ -f "${SCRIPT_DIR}/config/.config.cfg.example" ]; then + cp "${SCRIPT_DIR}/config/.config.cfg.example" "${SCRIPT_DIR}/config/.config.cfg" + echo -e " - ${CYAN}Created config file from example${NC}" + echo -e " - ${YELLOW}${BOLD}IMPORTANT: Edit ${SCRIPT_DIR}/config/.config.cfg with your Trakt.tv credentials${NC}" + else + echo -e " - ${RED}No config example found. 
Creating minimal config...${NC}" + cat > "${SCRIPT_DIR}/config/.config.cfg" << EOF +# Trakt API Configuration +API_URL="https://api.trakt.tv" +API_KEY="" +API_SECRET="" +REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" +ACCESS_TOKEN="" +REFRESH_TOKEN="" +USERNAME="" + +# Script paths +DOSLOG="${SCRIPT_DIR}/logs" +DOSCOPY="${SCRIPT_DIR}/copy" +BACKUP_DIR="${SCRIPT_DIR}/backup" +TEMP_DIR="${SCRIPT_DIR}/TEMP" +EOF + echo -e " - ${YELLOW}${BOLD}IMPORTANT: Edit ${SCRIPT_DIR}/config/.config.cfg with your Trakt.tv credentials${NC}" + fi +else + echo -e " - ${CYAN}Config file already exists${NC}" +fi +echo -e "${GREEN}✓ Configuration setup complete${NC}" +echo + +# Set correct file permissions +echo -e "${YELLOW}Setting file permissions...${NC}" +chmod 755 "${SCRIPT_DIR}/Export_Trakt_4_Letterboxd.sh" +chmod 755 "${SCRIPT_DIR}/lib/"*.sh 2>/dev/null || echo " - No library files to set permissions yet" +chmod 644 "${SCRIPT_DIR}/config/.config.cfg" 2>/dev/null || echo " - No config file to set permissions yet" +echo -e "${GREEN}✓ File permissions set${NC}" +echo + +# Final setup message +echo -e "${BLUE}${BOLD}==================================================${NC}" +echo -e "${GREEN}${BOLD}Installation Complete!${NC}" +echo -e "${BLUE}${BOLD}==================================================${NC}" +echo +echo -e "${CYAN}Next steps:${NC}" +echo -e "1. Edit ${YELLOW}${SCRIPT_DIR}/config/.config.cfg${NC} with your Trakt API credentials" +echo -e "2. Run ${YELLOW}./setup_trakt.sh${NC} to authenticate with Trakt" +echo -e "3. Run ${YELLOW}./Export_Trakt_4_Letterboxd.sh${NC} to export your data" +echo +echo -e "${CYAN}Available options:${NC}" +echo -e " - ${YELLOW}normal${NC}: Export movie history (default)" +echo -e " - ${YELLOW}initial${NC}: Export only essential data for first-time users" +echo -e " - ${YELLOW}complete${NC}: Export all data (history, ratings, watchlist, etc.)" +echo +echo -e "${BLUE}For more information, see the README.md file${NC}" +echo + +exit 0 \ No newline at end of file diff --git a/lib/config.sh b/lib/config.sh new file mode 100755 index 0000000..333581e --- /dev/null +++ b/lib/config.sh @@ -0,0 +1,126 @@ +#!/bin/bash +# +# Configuration management functions +# + +# Load configuration from appropriate location +load_config() { + local script_dir="$1" + local config_dir="${script_dir}/config" + local log_file="$2" + + # Determine config file path based on environment + if [ -f "/app/config/.config.cfg" ]; then + # If running in Docker, use the absolute path + source /app/config/.config.cfg + echo "Using Docker config file: /app/config/.config.cfg" | tee -a "${log_file}" + else + # If running locally, use the relative path + source "${config_dir}/.config.cfg" + echo "Using local config file: ${config_dir}/.config.cfg" | tee -a "${log_file}" + fi +} + +# Initialize temporary directory +init_temp_dir() { + local temp_dir="$1" + local log_file="$2" + + # Create directory if it doesn't exist + if [ ! 
-d "$temp_dir" ]; then + mkdir -p "$temp_dir" + echo "Created temporary directory: $temp_dir" | tee -a "${log_file}" + else + # Only remove the contents, not the directory itself + echo "Cleaning temporary directory: $temp_dir" | tee -a "${log_file}" + find "$temp_dir" -mindepth 1 -delete 2>/dev/null || { + # If find fails, try a more aggressive approach + chmod -R 777 "$temp_dir" 2>/dev/null + find "$temp_dir" -mindepth 1 -delete 2>/dev/null || echo "$(_ "warning"): Could not clean temporary directory completely" | tee -a "${log_file}" + } + fi + + # Ensure directory has proper permissions + chmod -R 777 "$temp_dir" 2>/dev/null || echo "$(_ "warning"): Could not set permissions on temporary directory" | tee -a "${log_file}" + echo "Temporary directory ready: $temp_dir" | tee -a "${log_file}" +} + +# Ensure required directories exist +ensure_directories() { + local log_dir="$1" + local copy_dir="$2" + local log_file="$3" + + # Create log directory if needed + if [ ! -d "$log_dir" ]; then + mkdir -p "$log_dir" + echo "Created log directory: $log_dir" | tee -a "${log_file}" + fi + + # Check and create copy directory if needed + if [ -d "$copy_dir" ]; then + echo "$(_ "directory_exists"): ✅" | tee -a "${log_file}" + echo "$(_ "directory_permissions"): $(ls -la "$copy_dir" | head -n 1 | awk '{print $1}')" | tee -a "${log_file}" + else + echo "$(_ "directory_exists"): ❌ (will attempt to create)" | tee -a "${log_file}" + mkdir -p "$copy_dir" + fi +} + +# Log environment information +log_environment() { + local log_file="$1" + local script_dir="$2" + local copy_dir="$3" + local log_dir="$4" + local backup_dir="$5" + + echo "🌍 $(_ "environment_info"):" | tee -a "${log_file}" + echo " - $(_ "user"): $(whoami)" | tee -a "${log_file}" + echo " - $(_ "working_directory"): $(pwd)" | tee -a "${log_file}" + echo " - $(_ "script_directory"): $script_dir" | tee -a "${log_file}" + echo " - $(_ "copy_directory"): $copy_dir" | tee -a "${log_file}" + echo " - $(_ "log_directory"): $log_dir" | tee -a "${log_file}" + echo " - $(_ "backup_directory"): $backup_dir" | tee -a "${log_file}" + echo " - $(_ "os_type"): $OSTYPE" | tee -a "${log_file}" + echo "-----------------------------------" | tee -a "${log_file}" +} + +# Detect OS for sed compatibility +detect_os_sed() { + local log_file="$1" + + if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS uses BSD sed + echo "sed -i ''" + echo "Detected macOS: Using BSD sed with empty string backup parameter" | tee -a "${log_file}" + else + # Linux and others use GNU sed + echo "sed -i" + echo "Detected Linux/other: Using GNU sed" | tee -a "${log_file}" + fi +} + +# Initialize backup directory +init_backup_dir() { + local backup_dir="$1" + local log_file="$2" + + # Create backup folder if it doesn't exist + if [ ! 
-d "${backup_dir}" ]; then + mkdir -p "${backup_dir}" + echo "$(_ "created_backup_directory"): ${backup_dir}" | tee -a "${log_file}" + else + echo "$(_ "backup_directory_exists"): ${backup_dir}" | tee -a "${log_file}" + fi + + # Check permissions + if [ -w "${backup_dir}" ]; then + echo "$(_ "backup_directory_writable"): ✅" | tee -a "${log_file}" + else + echo "$(_ "backup_directory_not_writable")" | tee -a "${log_file}" + fi + + # Return the backup directory for use + echo "${backup_dir}" +} \ No newline at end of file diff --git a/lib/data_processing.sh b/lib/data_processing.sh new file mode 100755 index 0000000..f16b65a --- /dev/null +++ b/lib/data_processing.sh @@ -0,0 +1,962 @@ +#!/bin/bash +# +# Data processing functions for Trakt to Letterboxd export +# + +# Create ratings lookup file from ratings_movies.json +create_ratings_lookup() { + local ratings_file="$1" + local output_file="$2" + local log_file="$3" + + if [ -f "$ratings_file" ] && [ -s "$ratings_file" ]; then + echo "DEBUG: Creating ratings lookup file..." | tee -a "${log_file}" + + # Dump the first few items for debugging + echo "DEBUG: First few items in ratings file:" | tee -a "${log_file}" + jq -r '.[:3] | map({id: .movie.ids.trakt, title: .movie.title, rating: .rating})' "$ratings_file" | tee -a "${log_file}" + + # Count ratings for debugging + local rating_count=$(jq '. | length' "$ratings_file" 2>/dev/null) + echo "📊 Found $rating_count ratings in file" | tee -a "${log_file}" + + # Verify JSON is valid before processing + if ! jq empty "$ratings_file" 2>/dev/null; then + echo "⚠️ WARNING: Invalid JSON in ratings file, creating empty lookup" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + + # Ensure output directory exists + local output_dir=$(dirname "$output_file") + mkdir -p "$output_dir" 2>/dev/null + + # Extract a sample rating to verify structure + jq -r 'first | {title: .movie.title, id: .movie.ids.trakt, rating: .rating} | tostring' "$ratings_file" > /dev/null 2>&1 + if [ $? -ne 0 ]; then + echo "⚠️ WARNING: Unexpected JSON structure in ratings file" | tee -a "${log_file}" + echo "⚠️ Using alternative approach to extract ratings" | tee -a "${log_file}" + + # Alternative approach - more direct, less prone to structure issues + jq -c '{} as $result | reduce .[] as $item ($result; + if $item.movie and $item.movie.ids and $item.movie.ids.trakt and $item.rating then + .[$item.movie.ids.trakt | tostring] = $item.rating + else . end)' "$ratings_file" > "$output_file" 2>/dev/null + + if [ $? -ne 0 ]; then + echo "⚠️ WARNING: Alternative approach failed, creating basic lookup" | tee -a "${log_file}" + # Print a sample of the JSON for debugging + echo "DEBUG: Sample of ratings file:" | tee -a "${log_file}" + jq -r 'first | tostring' "$ratings_file" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + else + # Original approach if structure is as expected + jq -c 'reduce .[] as $item ({}; + if $item.movie and $item.movie.ids and $item.movie.ids.trakt != null then + .[$item.movie.ids.trakt | tostring] = $item.rating + else . end)' "$ratings_file" > "$output_file" 2>/dev/null + + if [ $? -ne 0 ]; then + echo "⚠️ WARNING: Failed to process ratings file, creating empty lookup" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + fi + + # Verify the lookup file was created successfully + if [ ! 
-s "$output_file" ]; then + echo "⚠️ WARNING: Ratings lookup file is empty, creating basic JSON" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + + # Check content of created lookup file + local lookup_entries=$(jq 'length' "$output_file" 2>/dev/null || echo "0") + echo "📊 Created ratings lookup with $lookup_entries entries" | tee -a "${log_file}" + + # Show a sample for debugging + echo "📊 Sample ratings lookup:" | tee -a "${log_file}" + jq -r 'to_entries | .[0:3] | map("\(.key):\(.value)") | join(", ")' "$output_file" 2>/dev/null | tee -a "${log_file}" + + # Save a map of Trakt IDs to movie titles for easier debugging + local title_map_file="${output_file}.titles.json" + jq -c 'reduce .[] as $item ({}; + if $item.movie and $item.movie.ids and $item.movie.ids.trakt != null then + .[$item.movie.ids.trakt | tostring] = $item.movie.title + else . end)' "$ratings_file" > "$title_map_file" 2>/dev/null + + # Create a lookup for recent films we're specifically interested in + local recent_films_file="${output_file}.recent.json" + jq -c '[.[] | select(.movie.title | test("Jumanji: The Next Level|The Alto Knights|Paddington in Peru|The Gorge|Mickey 17|God Save The Tuches")) | {id: .movie.ids.trakt | tostring, title: .movie.title, rating: .rating}]' "$ratings_file" > "$recent_films_file" 2>/dev/null + + if [ -s "$recent_films_file" ]; then + echo "📊 Recent films ratings:" | tee -a "${log_file}" + cat "$recent_films_file" | tee -a "${log_file}" + fi + + return 0 + else + echo "WARNING: Ratings file not found or empty, creating empty lookup" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi +} + +# Create plays count lookup from watched_movies.json +create_plays_count_lookup() { + local watched_file="$1" + local output_file="$2" + local log_file="$3" + + if [ -f "$watched_file" ] && [ -s "$watched_file" ]; then + echo "DEBUG: Creating plays count lookup from watched_movies..." | tee -a "${log_file}" + + # Verify JSON is valid before processing + if ! jq empty "$watched_file" 2>/dev/null; then + echo "⚠️ WARNING: Invalid JSON in watched file, creating empty lookup" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + + # Ensure output directory exists + local output_dir=$(dirname "$output_file") + mkdir -p "$output_dir" 2>/dev/null + + # Create the lookup with proper error handling - using IMDB ID as key + if ! jq -c 'reduce .[] as $item ({}; if $item.movie.ids.imdb != null then .[$item.movie.ids.imdb] = $item.plays else . end)' "$watched_file" > "$output_file" 2>/dev/null; then + echo "⚠️ WARNING: Failed to process watched file, creating empty lookup" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + + # Verify the lookup file was created successfully + if [ ! -s "$output_file" ]; then + echo "⚠️ WARNING: Plays count lookup file is empty, creating basic JSON" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi + + return 0 + else + echo "WARNING: Watched movies file not found or empty, creating empty lookup" | tee -a "${log_file}" + echo "{}" > "$output_file" + return 1 + fi +} + +# Process history movies from history_movies JSON file +process_history_movies() { + local history_file="$1" + local ratings_lookup="$2" + local plays_lookup="$3" + local csv_output="$4" + local raw_output="$5" + local log="$6" + + # Check if history file exists + if [ ! -f "$history_file" ]; then + echo -e "⚠️ Movies history: No history_movies.json file found" | tee -a "${log}" + return 1 + fi + + if [ ! 
-s "$history_file" ]; then + echo -e "⚠️ Movies history: history_movies.json file is empty" | tee -a "${log}" + return 1 + fi + + # Verify if the JSON is valid + if ! jq empty "$history_file" 2>/dev/null; then + echo -e "⚠️ Movies history: Invalid JSON in history_movies.json" | tee -a "${log}" + return 1 + fi + + # Check ratings lookup file + if [ -f "$ratings_lookup" ] && [ -s "$ratings_lookup" ]; then + echo -e "📊 Ratings lookup file found: $(wc -c < "$ratings_lookup") bytes" | tee -a "${log}" + echo -e "📊 Sample ratings: $(jq -r 'to_entries | .[0:3] | map("\(.key):\(.value)") | join(", ")' "$ratings_lookup" 2>/dev/null || echo "Error reading ratings")" | tee -a "${log}" + + # Debug: Verify if specific Trakt IDs exist in the lookup + local debug_ids=("360095" "814646" "915974") # IDs from previous verification + echo -e "DEBUG: Verifying presence of known ratings IDs:" | tee -a "${log}" + for id in "${debug_ids[@]}"; do + local found_rating=$(jq -r --arg id "$id" '.[$id] // "not found"' "$ratings_lookup" 2>/dev/null) + echo -e "DEBUG: ID $id rating: $found_rating" | tee -a "${log}" + done + else + echo -e "⚠️ Ratings lookup file not found or empty: $ratings_lookup" | tee -a "${log}" + # Try to find ratings file in same directory as history file + local ratings_file="${history_file%/*}/johandev-ratings_movies.json" + if [ -f "$ratings_file" ] && [ -s "$ratings_file" ]; then + echo -e "🔍 Found ratings file: $ratings_file" | tee -a "${log}" + # Create a temporary ratings lookup file directly + local temp_ratings_lookup=$(mktemp) + jq -c '{} as $result | reduce .[] as $item ($result; + if $item.movie and $item.movie.ids and $item.movie.ids.trakt and $item.rating then + .[$item.movie.ids.trakt | tostring] = $item.rating + else . end)' "$ratings_file" > "$temp_ratings_lookup" 2>/dev/null + if [ -s "$temp_ratings_lookup" ]; then + echo -e "✅ Created temporary ratings lookup from ratings file" | tee -a "${log}" + ratings_lookup="$temp_ratings_lookup" + fi + fi + fi + + # Get number of movies in history + local movie_count=$(jq length "$history_file") + if [ "$movie_count" -eq 0 ]; then + echo -e "⚠️ Movies history: No movies found in history" | tee -a "${log}" + return 1 + fi + + echo "DEBUG: Processing history_movies file with ratings..." >> "${log}" + + # Create temporary files for processed data + local tmp_file=$(mktemp) + local ratings_direct_file=$(mktemp) + + # Extract ratings directly from history file if possible + jq -r '.[] | + select(.movie and .movie.ids and .movie.ids.trakt) | + [.movie.ids.trakt, (.rating // "")]| + @tsv' "$history_file" > "$ratings_direct_file" 2>/dev/null + + # Extract all raw data with ratings included + jq -r '.[] | + # Handle null values safely with defaults + . as $item | + { + title: (try ($item.movie.title) catch null // "Unknown Title"), + year: (try ($item.movie.year | tostring) catch null // ""), + imdb_id: (try ($item.movie.ids.imdb) catch null // ""), + tmdb_id: (try ($item.movie.ids.tmdb | tostring) catch null // ""), + watched_at: (try ($item.watched_at) catch null // ""), + trakt_id: (try ($item.movie.ids.trakt | tostring) catch null // ""), + rating: (try ($item.rating | tostring) catch null // "") + } | + # Build CSV line with safe values + [.title, .year, .imdb_id, .tmdb_id, .watched_at, .trakt_id, .rating] | + @csv' "$history_file" > "$tmp_file" + + # Handle errors in jq processing + if [ $? -ne 0 ]; then + echo -e "⚠️ Error processing history_movies.json with jq" | tee -a "${log}" + echo -e "Attempting alternative processing method..." 
| tee -a "${log}" + + # Alternative processing using a simpler jq command + jq -r '.[] | + [ + (.movie.title // "Unknown Title"), + (.movie.year // ""), + (.movie.ids.imdb // ""), + (.movie.ids.tmdb // ""), + (.watched_at // ""), + (.movie.ids.trakt // ""), + (.rating // "") + ] | + @csv' "$history_file" > "$tmp_file" 2>>"${log}" + + if [ $? -ne 0 ]; then + echo -e "⚠️ Alternative processing also failed" | tee -a "${log}" + return 1 + fi + fi + + # Check if tmp_file was created and has content + if [ ! -s "$tmp_file" ]; then + echo -e "⚠️ Error: No data extracted from history_movies.json" | tee -a "${log}" + return 1 + fi + + # Create raw output with headers + echo "Title,Year,imdbID,tmdbID,WatchedDate,TraktID,Rating" > "$raw_output" + cat "$tmp_file" >> "$raw_output" + + # If ratings_file is available, extract all ratings into a lookup map + local title_ratings={} + local imdb_ratings={} + local tmdb_ratings={} + local ratings_file="${history_file%/*}/johandev-ratings_movies.json" + + if [ -f "$ratings_file" ] && [ -s "$ratings_file" ]; then + echo "DEBUG: Found ratings file at $ratings_file, parsing additional ratings..." | tee -a "${log}" + # Create lookup maps by title, IMDb ID, and TMDB ID + title_ratings=$(jq -c 'reduce .[] as $item ({}; + if $item.movie.title != null then .[$item.movie.title] = $item.rating else . end)' "$ratings_file" 2>/dev/null) + + imdb_ratings=$(jq -c 'reduce .[] as $item ({}; + if $item.movie.ids.imdb != null then .[$item.movie.ids.imdb] = $item.rating else . end)' "$ratings_file" 2>/dev/null) + + tmdb_ratings=$(jq -c 'reduce .[] as $item ({}; + if $item.movie.ids.tmdb != null then .[$item.movie.ids.tmdb | tostring] = $item.rating else . end)' "$ratings_file" 2>/dev/null) + + echo "DEBUG: Created additional lookups from ratings file" | tee -a "${log}" + fi + + # Process the extracted data to add ratings and deduplicate + local processed_count=0 + local ratings_found=0 + local existing_ids=() + + # Add CSV header to output + echo "Title,Year,imdbID,tmdbID,WatchedDate,Rating10,Rewatch" > "$csv_output" + + while IFS=, read -r title year imdb_id tmdb_id watched_at trakt_id direct_rating; do + # Clean quotes from CSV fields if present + title=$(echo "$title" | sed -e 's/^"//' -e 's/"$//') + year=$(echo "$year" | sed -e 's/^"//' -e 's/"$//') + imdb_id=$(echo "$imdb_id" | sed -e 's/^"//' -e 's/"$//') + tmdb_id=$(echo "$tmdb_id" | sed -e 's/^"//' -e 's/"$//') + watched_at=$(echo "$watched_at" | sed -e 's/^"//' -e 's/"$//') + trakt_id=$(echo "$trakt_id" | sed -e 's/^"//' -e 's/"$//') + direct_rating=$(echo "$direct_rating" | sed -e 's/^"//' -e 's/"$//') + + # Skip entries with missing key data + if [ -z "$title" ] || [ "$title" = "null" ] || [ -z "$watched_at" ] || [ "$watched_at" = "null" ]; then + continue + fi + + # Format watched date (keep only YYYY-MM-DD) + watched_at=$(echo "$watched_at" | awk -F'T' '{print $1}') + + # Get rating from multiple sources with priority: + # 1. Direct rating from history item + # 2. Ratings lookup from trakt_id + # 3. 
Title, IMDb, or TMDB lookups + rating="" + + # Method 1: Check if we have a direct rating + if [ -n "$direct_rating" ] && [ "$direct_rating" != "null" ]; then + rating="$direct_rating" + ((ratings_found++)) + echo "DEBUG: Using direct rating for $title: $rating" | tee -a "${log}" + fi + + # Method 2: Check in ratings lookup by Trakt ID + if [ -z "$rating" ] && [ -f "$ratings_lookup" ] && [ -n "$trakt_id" ] && [ "$trakt_id" != "null" ]; then + local lookup_rating=$(jq -r --arg id "$trakt_id" '.[$id] // ""' "$ratings_lookup" 2>/dev/null) + if [ -n "$lookup_rating" ] && [ "$lookup_rating" != "null" ]; then + rating="$lookup_rating" + ((ratings_found++)) + echo "DEBUG: Found rating from lookup for $title ($trakt_id): $rating" | tee -a "${log}" + fi + fi + + # Method 3: Check in ratings directly from the ratings file by title + if [ -z "$rating" ] && [ -n "$title_ratings" ] && [ "$title_ratings" != "{}" ]; then + local title_rating=$(echo "$title_ratings" | jq -r --arg title "$title" '.[$title] // ""') + if [ -n "$title_rating" ] && [ "$title_rating" != "null" ]; then + rating="$title_rating" + ((ratings_found++)) + echo "DEBUG: Found rating by title for $title: $rating" | tee -a "${log}" + fi + fi + + # Method 4: Check by IMDb ID + if [ -z "$rating" ] && [ -n "$imdb_ratings" ] && [ "$imdb_ratings" != "{}" ] && [ -n "$imdb_id" ] && [ "$imdb_id" != "null" ]; then + local imdb_rating=$(echo "$imdb_ratings" | jq -r --arg id "$imdb_id" '.[$id] // ""') + if [ -n "$imdb_rating" ] && [ "$imdb_rating" != "null" ]; then + rating="$imdb_rating" + ((ratings_found++)) + echo "DEBUG: Found rating by IMDb ID for $title: $rating" | tee -a "${log}" + fi + fi + + # Method 5: Check by TMDB ID + if [ -z "$rating" ] && [ -n "$tmdb_ratings" ] && [ "$tmdb_ratings" != "{}" ] && [ -n "$tmdb_id" ] && [ "$tmdb_id" != "null" ]; then + local tmdb_rating=$(echo "$tmdb_ratings" | jq -r --arg id "$tmdb_id" '.[$id] // ""') + if [ -n "$tmdb_rating" ] && [ "$tmdb_rating" != "null" ]; then + rating="$tmdb_rating" + ((ratings_found++)) + echo "DEBUG: Found rating by TMDB ID for $title: $rating" | tee -a "${log}" + fi + fi + + # Method 6: Direct lookup from ratings file if all else fails + if [ -z "$rating" ] && [ -f "${history_file%/*}/johandev-ratings_movies.json" ] && [ -n "$trakt_id" ] && [ "$trakt_id" != "null" ]; then + rating=$(jq -r --arg tid "$trakt_id" '.[] | select(.movie.ids.trakt | tostring == $tid) | .rating' "${history_file%/*}/johandev-ratings_movies.json" 2>/dev/null | head -n1) + if [ -n "$rating" ] && [ "$rating" != "null" ]; then + ((ratings_found++)) + echo "DEBUG: Found direct rating for $title ($trakt_id): $rating" | tee -a "${log}" + fi + fi + + # Get plays count for rewatch flag + rewatch="false" + if [ -n "$trakt_id" ] && [ "$trakt_id" != "null" ]; then + play_count=$(jq -r --arg id "$trakt_id" '.[$id] // "0"' "$plays_lookup" 2>/dev/null) + if [ -n "$play_count" ] && [ "$play_count" != "null" ] && [ "$play_count" -gt 1 ]; then + rewatch="true" + fi + fi + + # Add data to CSV file if not a duplicate + # Filter by unique imdb or tmdb ids if available, otherwise by title+year + local item_id="" + if [ -n "$imdb_id" ] && [ "$imdb_id" != "null" ]; then + item_id="imdb:$imdb_id" + elif [ -n "$tmdb_id" ] && [ "$tmdb_id" != "null" ]; then + item_id="tmdb:$tmdb_id" + elif [ -n "$title" ] && [ -n "$year" ]; then + item_id="title:$title:$year" + else + # Skip if no identifier available + continue + fi + + # Check if movie is already processed (simple deduplication) + if [[ " ${existing_ids[@]} " =~ " ${item_id} " 
]]; then + continue + fi + + # Ensure proper quoting and format for IMDb ID + if [[ ! "$imdb_id" =~ ^tt ]] && [ -n "$imdb_id" ] && [ "$imdb_id" != "null" ]; then + imdb_id="tt$imdb_id" + fi + + # Ensure proper quoting and format + echo "\"$title\",\"$year\",\"$imdb_id\",\"$tmdb_id\",\"$watched_at\",\"$rating\",\"$rewatch\"" >> "$csv_output" + + # Add to processed IDs + existing_ids+=("$item_id") + ((processed_count++)) + done < "$tmp_file" + + # Clean up temporary files + rm -f "$tmp_file" "$ratings_direct_file" + + # Report results + echo "Movies history: $processed_count movies processed" | tee -a "${log}" + echo "Ratings found: $ratings_found ratings added to CSV" | tee -a "${log}" + + if [ "$processed_count" -eq 0 ]; then + echo -e "⚠️ Movies history: No valid movies extracted from history" | tee -a "${log}" + return 1 + fi + + return 0 +} + +# Process watched movies (used in complete mode or when history is missing) +process_watched_movies() { + local watched_file="$1" + local ratings_lookup="$2" + local output_csv="$3" + local existing_ids_file="$4" + local raw_output_file="$5" + local is_fallback="$6" # true if this is a fallback for missing history + local log_file="$7" + + if [ ! -f "$watched_file" ]; then + echo "WARNING: Watched movies file not found, skipping" | tee -a "${log_file}" + return 1 + fi + + # Count total movies in the watched file + local watched_count=$(jq '. | length' "$watched_file" 2>/dev/null || echo "0") + echo "📊 Found $watched_count movies in watched file" | tee -a "${log_file}" + + # Check if ratings_lookup exists to avoid jq errors + local has_ratings=false + if [ -f "$ratings_lookup" ] && [ -s "$ratings_lookup" ]; then + has_ratings=true + echo "📊 Using ratings lookup for watched movies processing" | tee -a "${log_file}" + echo "📊 Sample ratings: $(jq -r 'to_entries | .[0:3] | map("\(.key):\(.value)") | join(", ")' "$ratings_lookup" 2>/dev/null || echo "Error reading ratings")" | tee -a "${log_file}" + else + echo "⚠️ WARNING: Ratings lookup file not found or empty, proceeding without ratings" | tee -a "${log_file}" + fi + + # Create a temporary file for the extracted data + local tmp_file=$(mktemp) + + if [ "$is_fallback" = "true" ]; then + echo "DEBUG: No history found. Processing watched_movies file with ratings..." | tee -a "${log_file}" + + # Add CSV header to output if needed + if [ ! -s "$output_csv" ]; then + echo "Title,Year,imdbID,tmdbID,WatchedDate,Rating10,Rewatch" > "$output_csv" + fi + + # Extract data from watched file to temporary file using jq + jq -r '.[] | + # Handle null values safely with defaults + . 
as $item | + { + title: (try ($item.movie.title) catch null // "Unknown Title"), + year: (try ($item.movie.year | tostring) catch null // ""), + imdb_id: (try ($item.movie.ids.imdb) catch null // ""), + tmdb_id: (try ($item.movie.ids.tmdb | tostring) catch null // ""), + watched_at: (try ($item.last_watched_at) catch null // ""), + trakt_id: (try ($item.movie.ids.trakt | tostring) catch null // ""), + plays: (try ($item.plays | tostring) catch null // "1") + } | + # Build CSV line with safe values + [.title, .year, .imdb_id, .tmdb_id, .watched_at, .trakt_id, .plays] | + @csv' "$watched_file" > "$tmp_file" + + # Process the file line by line + while IFS=, read -r title year imdb_id tmdb_id watched_at trakt_id plays; do + # Clean quotes from CSV fields if present + title=$(echo "$title" | sed -e 's/^"//' -e 's/"$//') + year=$(echo "$year" | sed -e 's/^"//' -e 's/"$//') + imdb_id=$(echo "$imdb_id" | sed -e 's/^"//' -e 's/"$//') + tmdb_id=$(echo "$tmdb_id" | sed -e 's/^"//' -e 's/"$//') + watched_at=$(echo "$watched_at" | sed -e 's/^"//' -e 's/"$//') + trakt_id=$(echo "$trakt_id" | sed -e 's/^"//' -e 's/"$//') + plays=$(echo "$plays" | sed -e 's/^"//' -e 's/"$//') + + # Skip entries with missing key data + if [ -z "$title" ] || [ "$title" = "null" ]; then + continue + fi + + # Format watched date (keep only YYYY-MM-DD) + watched_at=$(echo "$watched_at" | awk -F'T' '{print $1}') + + # Determine rewatch status + rewatch="false" + if [ -n "$plays" ] && [ "$plays" != "null" ] && [ "$plays" -gt 1 ]; then + rewatch="true" + fi + + # Get rating if available + rating="" + if [ "$has_ratings" = "true" ] && [ -n "$trakt_id" ] && [ "$trakt_id" != "null" ]; then + rating=$(jq -r --arg id "$trakt_id" '.[$id] // ""' "$ratings_lookup" 2>/dev/null) + if [ -n "$rating" ] && [ "$rating" != "null" ]; then + echo "DEBUG: Found rating for $title ($trakt_id): $rating" >> "${log_file}" + fi + fi + + # Add to CSV, ensuring proper format for IMDb ID + if [[ ! "$imdb_id" =~ ^tt ]]; then + imdb_id="tt$imdb_id" + fi + + echo "\"$title\",\"$year\",\"$imdb_id\",\"$tmdb_id\",\"$watched_at\",\"$rating\",\"$rewatch\"" >> "$output_csv" + done < "$tmp_file" + + # Report results + local processed_count=$(grep -c "," "$output_csv") + echo "Movies watched: $processed_count movies processed (fallback mode)" | tee -a "${log_file}" + else + echo "DEBUG: Processing watched_movies file with ratings (complete mode)..." | tee -a "${log_file}" + + # Create tracking file for existing IDs if it doesn't exist + if [ ! -f "$existing_ids_file" ]; then + echo "DEBUG: Creating new tracking file for existing IDs" | tee -a "${log_file}" + touch "$existing_ids_file" + # Extract all movie IDs from existing CSV to avoid duplicates - skip header + if [ -s "$output_csv" ]; then + sed 1d "$output_csv" | awk -F, '{print $3}' | sed 's/"//g' > "$existing_ids_file" + fi + fi + + # Extract data from watched file to temporary file + jq -r '.[] | + # Handle null values safely with defaults + . 
as $item | + { + title: (try ($item.movie.title) catch null // "Unknown Title"), + year: (try ($item.movie.year | tostring) catch null // ""), + imdb_id: (try ($item.movie.ids.imdb) catch null // ""), + tmdb_id: (try ($item.movie.ids.tmdb | tostring) catch null // ""), + watched_at: (try ($item.last_watched_at) catch null // ""), + trakt_id: (try ($item.movie.ids.trakt | tostring) catch null // ""), + plays: (try ($item.plays | tostring) catch null // "1") + } | + # Build CSV line with safe values + [.title, .year, .imdb_id, .tmdb_id, .watched_at, .trakt_id, .plays] | + @csv' "$watched_file" > "$tmp_file" + + # Process the file line by line + local added_count=0 + local duplicate_count=0 + + while IFS=, read -r title year imdb_id tmdb_id watched_at trakt_id plays; do + # Clean quotes from CSV fields if present + title=$(echo "$title" | sed -e 's/^"//' -e 's/"$//') + year=$(echo "$year" | sed -e 's/^"//' -e 's/"$//') + imdb_id=$(echo "$imdb_id" | sed -e 's/^"//' -e 's/"$//') + tmdb_id=$(echo "$tmdb_id" | sed -e 's/^"//' -e 's/"$//') + watched_at=$(echo "$watched_at" | sed -e 's/^"//' -e 's/"$//') + trakt_id=$(echo "$trakt_id" | sed -e 's/^"//' -e 's/"$//') + plays=$(echo "$plays" | sed -e 's/^"//' -e 's/"$//') + + # Skip entries with missing key data + if [ -z "$title" ] || [ "$title" = "null" ]; then + continue + fi + + # Format watched date (keep only YYYY-MM-DD) + watched_at=$(echo "$watched_at" | awk -F'T' '{print $1}') + + # Ensure proper IMDb ID format + if [[ ! "$imdb_id" =~ ^tt ]]; then + imdb_id="tt$imdb_id" + fi + + # Check if this movie is already in our list + if grep -q "$imdb_id" "$existing_ids_file"; then + ((duplicate_count++)) + continue + fi + + # Determine rewatch status + rewatch="false" + if [ -n "$plays" ] && [ "$plays" != "null" ] && [ "$plays" -gt 1 ]; then + rewatch="true" + fi + + # Get rating if available + rating="" + if [ "$has_ratings" = "true" ] && [ -n "$trakt_id" ] && [ "$trakt_id" != "null" ]; then + rating=$(jq -r --arg id "$trakt_id" '.[$id] // ""' "$ratings_lookup" 2>/dev/null) + if [ -n "$rating" ] && [ "$rating" != "null" ]; then + echo "DEBUG: Found rating for $title ($trakt_id): $rating" >> "${log_file}" + fi + fi + + # Add to CSV + echo "\"$title\",\"$year\",\"$imdb_id\",\"$tmdb_id\",\"$watched_at\",\"$rating\",\"$rewatch\"" >> "$output_csv" + + # Add to tracking file + echo "$imdb_id" >> "$existing_ids_file" + + ((added_count++)) + done < "$tmp_file" + + # Report results + local total_count=$(grep -c "," "$output_csv") + echo "📊 Added $added_count new movies from watched list" | tee -a "${log_file}" + echo "📊 Skipped $duplicate_count duplicate movies" | tee -a "${log_file}" + echo "Total movies after combining history and watched list: $total_count movies processed" | tee -a "${log_file}" + fi + + # Clean up + rm -f "$tmp_file" + + return 0 +} + +# Create backup archive +create_backup_archive() { + local backup_dir="$1" + local log_file="$2" + + debug_msg "Creating backup archive" "$log_file" + # Generate a unique backup archive name + backup_archive_name="backup-$(date '+%Y%m%d%H%M%S').tar.gz" + # Create the archive + tar -czvf "${backup_dir}/${backup_archive_name}" -C "$(dirname "${backup_dir}")" "$(basename "${backup_dir}")" > /dev/null 2>&1 + echo -e "Backup completed: ${backup_dir}/${backup_archive_name}" | tee -a "${log_file}" +} + +# Improved function to deduplicate movies based on IMDb ID +deduplicate_movies() { + local input_csv="$1" + local output_csv="$2" + local log_file="$3" + + echo "🔄 Deduplicating movies in CSV file..." 
| tee -a "${log_file}" + + # Create a temporary file for the deduplicated content + local temp_csv="${input_csv}.dedup" + + # Keep header line + head -n 1 "$input_csv" > "$temp_csv" + + # Track IMDb IDs we've seen + local seen_ids=() + local id_file=$(mktemp) + + # Count total movies + local total_lines=$(wc -l < "$input_csv") + local total_movies=$((total_lines - 1)) + echo "📊 Total movies before deduplication: $total_movies" | tee -a "${log_file}" + + # Process each line (skipping header) + cat "$input_csv" | tail -n +2 | while IFS=, read -r title year imdb tmdb date rating rewatch; do + # Extract IMDb ID (remove quotes if present) + clean_imdb=$(echo "$imdb" | sed 's/"//g' | sed 's/^tt//g') + + # Skip if no IMDb ID + if [ -z "$clean_imdb" ]; then + echo "⚠️ Skipping movie with no IMDb ID: $title ($year)" | tee -a "${log_file}" + continue + fi + + # Use a file to track seen IDs (more reliable than array in a subshell) + if ! grep -q "^$clean_imdb$" "$id_file"; then + echo "$clean_imdb" >> "$id_file" + # Ensure IMDb ID has tt prefix + formatted_imdb="\"tt${clean_imdb}\"" + echo "${title},${year},${formatted_imdb},${tmdb},${date},${rating},${rewatch}" >> "$temp_csv" + fi + done + + # Move the deduplicated file to the output + mv "$temp_csv" "$output_csv" + + # Count deduplicated movies + local dedup_lines=$(wc -l < "$output_csv") + local dedup_movies=$((dedup_lines - 1)) + echo "📊 Total movies after deduplication: $dedup_movies" | tee -a "${log_file}" + echo "🔄 Removed $((total_movies - dedup_movies)) duplicate entries" | tee -a "${log_file}" + + # Clean up + rm -f "$id_file" + + return 0 +} + +# Function to limit the number of movies in the CSV if LIMIT_FILMS is set +limit_movies_in_csv() { + local input_csv="$1" + local output_csv="$2" + local log_file="$3" + + # Default to no limit if LIMIT_FILMS is not set + local limit=${LIMIT_FILMS:-0} + + # Check if input file exists and is not empty + if [ ! -f "$input_csv" ]; then + echo "❌ ERROR: Input CSV file does not exist: $input_csv" | tee -a "${log_file}" + return 1 + fi + + # Ensure input file has a header + local header="Title,Year,imdbID,tmdbID,WatchedDate,Rating10,Rewatch" + local first_line=$(head -n 1 "$input_csv") + + # Create a temporary file for processing + local temp_input_csv="${input_csv}.with_header" + + # If the first line is not the expected header, add it + if [ "$first_line" != "$header" ]; then + echo "⚠️ CSV file is missing header, adding it..." | tee -a "${log_file}" + echo "$header" > "$temp_input_csv" + cat "$input_csv" >> "$temp_input_csv" + else + cp "$input_csv" "$temp_input_csv" + fi + + # Check if limit is a positive number + if [[ "$limit" =~ ^[0-9]+$ ]] && [ "$limit" -gt 0 ]; then + echo "🎯 Limiting CSV to the most recent $limit movies..." | tee -a "${log_file}" + + # Create temporary files for processing + local temp_csv="${input_csv}.limited" + local prep_csv="${input_csv}.prep" + local sorted_csv="${input_csv}.sorted" + local final_csv="${input_csv}.final" + + # Keep header line in output + echo "$header" > "$temp_csv" + + # Show sample of dates in the file for debugging + echo "🔍 Sample of watch dates in CSV:" | tee -a "${log_file}" + tail -n +2 "$temp_input_csv" | cut -d, -f5 | tr -d '"' | sort | uniq | head -n 5 | tee -a "${log_file}" + + # Count lines from input (minus header if present) + local input_count=$(tail -n +2 "$temp_input_csv" | wc -l) + echo "📊 Total movies before limiting: $input_count" | tee -a "${log_file}" + + echo "# Preprocessing dates for sorting..." 
| tee -a "${log_file}" + + # Create a preprocessed CSV with validated dates and a sort key in the first column + # Skip the header as we'll add it back later + tail -n +2 "$temp_input_csv" | while IFS=, read -r title year imdb tmdb date rating rewatch; do + # Clean the date (remove quotes) + clean_date=$(echo "$date" | tr -d '"') + + # Check if the date is in YYYY-MM-DD format + if [[ "$clean_date" =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ ]]; then + # Valid ISO date - use it as sort key + echo "$clean_date|${title}|${year}|${imdb}|${tmdb}|${date}|${rating}|${rewatch}" >> "$prep_csv" + else + # Invalid or empty date - use old date to sort at the end + echo "1970-01-01|${title}|${year}|${imdb}|${tmdb}|${date}|${rating}|${rewatch}" >> "$prep_csv" + echo "⚠️ Invalid date format found: '$date' for movie: $title. Using default." | tee -a "${log_file}" + fi + done + + # Sort by date in reverse order and take only the top N entries + echo "# Sorting movies by date (newest first)..." | tee -a "${log_file}" + if [ -f "$prep_csv" ]; then + # Sort lines by the date prefix (newest first) and keep only the top N + sort -r "$prep_csv" | head -n "$limit" > "$sorted_csv" + + # Now convert back to CSV format by removing the sort key + cat "$sorted_csv" | while IFS='|' read -r sort_key title year imdb tmdb date rating rewatch; do + # Extract TMDB ID for debugging + tmdb_clean="" + if [[ "$tmdb" =~ ([0-9]+) ]]; then + tmdb_clean=${BASH_REMATCH[1]} + echo "🔍 Movie: $title, TMDB: $tmdb_clean, Rating: $rating" >> "${log_file}" + fi + + # Reconstitute the CSV line + echo "${title},${year},${imdb},${tmdb},${date},${rating},${rewatch}" >> "$temp_csv" + done + else + echo "⚠️ No valid data to process after preprocessing" | tee -a "${log_file}" + fi + + # Move the limited file to a temporary final file + cp "$temp_csv" "$final_csv" + + # Find all possible ratings files + echo "# Looking for ratings files in backup directories..." | tee -a "${log_file}" + local backup_dir=$(dirname "$(dirname "$log_file")")/backup + echo "📊 Searching in backup directory: $backup_dir" | tee -a "${log_file}" + + # Find the most recent ratings file + local ratings_file=$(find "$backup_dir" -name '*ratings_movies.json' -type f -print0 | + xargs -0 ls -t | head -n1) + + if [ -n "$ratings_file" ] && [ -f "$ratings_file" ]; then + echo "📊 Found ratings file: $ratings_file" | tee -a "${log_file}" + local ratings_count=$(jq '. | length' "$ratings_file" 2>/dev/null || echo "unknown") + echo "📊 Ratings file contains $ratings_count ratings" | tee -a "${log_file}" + else + echo "⚠️ No ratings file found in backup directories" | tee -a "${log_file}" + fi + + # Apply comprehensive ratings lookup if ratings file exists + if [ -f "$ratings_file" ] && [ -s "$ratings_file" ]; then + echo "# Finding ratings for all selected films..." | tee -a "${log_file}" + + # Create corrected file with header + echo "$header" > "$output_csv" + + # Create temporary lookup files for faster access + local title_ratings_file=$(mktemp) + local imdb_ratings_file=$(mktemp) + local tmdb_ratings_file=$(mktemp) + + echo "# Creating ratings lookup maps..." 
| tee -a "${log_file}" + # Create lookup by title + jq -r '.[] | [.movie.title, .rating] | @tsv' "$ratings_file" > "$title_ratings_file" + + # Create lookup by IMDb ID + jq -r '.[] | select(.movie.ids.imdb != null) | [.movie.ids.imdb, .rating] | @tsv' "$ratings_file" > "$imdb_ratings_file" + + # Create lookup by TMDB ID + jq -r '.[] | select(.movie.ids.tmdb != null) | [.movie.ids.tmdb | tostring, .rating] | @tsv' "$ratings_file" > "$tmdb_ratings_file" + + # Show sample of lookups + echo "📊 Sample of title ratings lookup (first 3 entries):" | tee -a "${log_file}" + head -n 3 "$title_ratings_file" | tee -a "${log_file}" + + # Variables to track rating matches + local ratings_found=0 + local ratings_total=0 + + # Process the limited CSV and find ratings + tail -n +2 "$final_csv" | while IFS=, read -r title year imdb tmdb date rating rewatch; do + ((ratings_total++)) + + # Clean fields (remove quotes) + clean_title=$(echo "$title" | tr -d '"') + clean_imdb=$(echo "$imdb" | tr -d '"') + clean_tmdb=$(echo "$tmdb" | tr -d '"') + clean_rating=$(echo "$rating" | tr -d '"') + + # Default to existing rating + local final_rating="$clean_rating" + + # Only look for a new rating if current one is empty + if [ -z "$final_rating" ] || [ "$final_rating" = "null" ]; then + # Try exact title match + local found_rating=$(grep -F "$clean_title" "$title_ratings_file" | head -n1 | cut -f2) + + if [ -n "$found_rating" ]; then + final_rating="$found_rating" + ((ratings_found++)) + echo "✅ Found rating $final_rating for '$clean_title' by title match" | tee -a "${log_file}" + else + # Try by IMDb ID + if [ -n "$clean_imdb" ] && [ "$clean_imdb" != "null" ]; then + imdb_rating=$(grep -F "$clean_imdb" "$imdb_ratings_file" | head -n1 | cut -f2) + if [ -n "$imdb_rating" ]; then + final_rating="$imdb_rating" + ((ratings_found++)) + echo "✅ Found rating $final_rating for '$clean_title' by IMDb ID ($clean_imdb)" | tee -a "${log_file}" + fi + fi + + # Try by TMDB ID if still no rating + if { [ -z "$final_rating" ] || [ "$final_rating" = "null" ]; } && [ -n "$clean_tmdb" ] && [ "$clean_tmdb" != "null" ]; then + tmdb_rating=$(grep -F "$clean_tmdb" "$tmdb_ratings_file" | head -n1 | cut -f2) + if [ -n "$tmdb_rating" ]; then + final_rating="$tmdb_rating" + ((ratings_found++)) + echo "✅ Found rating $final_rating for '$clean_title' by TMDB ID ($clean_tmdb)" | tee -a "${log_file}" + fi + fi + + # If still no rating, try direct lookup from the ratings file + if [ -z "$final_rating" ] || [ "$final_rating" = "null" ]; then + # Try a case-insensitive grep search + local grep_result=$(grep -i "\"title\": \"$clean_title\"" "$ratings_file" -A 10 | grep -m 1 "\"rating\":" | awk -F: '{print $2}' | tr -d ' ,') + if [ -n "$grep_result" ]; then + final_rating="$grep_result" + ((ratings_found++)) + echo "✅ Found rating $final_rating for '$clean_title' by direct file search" | tee -a "${log_file}" + fi + fi + fi + else + echo "ℹ️ Using existing rating $final_rating for '$clean_title'" | tee -a "${log_file}" + ((ratings_found++)) + fi + + # Reconstruct the line with the rating + echo "\"$clean_title\",\"$year\",\"$clean_imdb\",\"$clean_tmdb\",\"$date\",\"$final_rating\",\"$rewatch\"" >> "$output_csv" + done + + # Report ratings stats + echo "📊 Ratings found: $ratings_found out of $ratings_total movies" | tee -a "${log_file}" + + # Clean up + rm -f "$title_ratings_file" "$imdb_ratings_file" "$tmdb_ratings_file" + else + # No ratings file available, just use the original ratings + echo "⚠️ No valid ratings file found, using existing ratings" | tee 
-a "${log_file}" + cp "$final_csv" "$output_csv" + fi + + # Count final movies + local final_count=$(tail -n +2 "$output_csv" | wc -l) + echo "📊 Keeping only the $final_count most recent movies" | tee -a "${log_file}" + + # Debug: Show the dates that were kept + echo "📅 Watch dates of kept movies (newest first):" | tee -a "${log_file}" + echo "---------------------------------------------" | tee -a "${log_file}" + # Print the first 10 entries with title and date + tail -n +2 "$output_csv" | head -n 10 | while IFS=, read -r title year imdb tmdb date rating rewatch; do + echo "Film: $title - Date: $date - Rating: $rating" | tee -a "${log_file}" + done + echo "---------------------------------------------" | tee -a "${log_file}" + + # Clean up + rm -f "$temp_input_csv" "$temp_csv" "$prep_csv" "$sorted_csv" "$final_csv" + + return 0 + else + # No limit needed + echo "📊 No movie limit applied (LIMIT_FILMS=$limit)" | tee -a "${log_file}" + + # Just ensure header and copy + cp "$temp_input_csv" "$output_csv" + + # Clean up + rm -f "$temp_input_csv" + + return 0 + fi +} + +# Add this function call at the end of process_data function \ No newline at end of file diff --git a/lib/health_check.sh b/lib/health_check.sh new file mode 100755 index 0000000..8ac45b4 --- /dev/null +++ b/lib/health_check.sh @@ -0,0 +1,125 @@ +#!/bin/bash +# health_check.sh - Health check script for Docker container + +# Source config if available +CONFIG_FILE="${CONFIG_DIR:-/app/config}/.config.cfg" +if [[ -f "$CONFIG_FILE" ]]; then + source "$CONFIG_FILE" +fi + +# Check status variables +STATUS="ok" +DETAILS=() + +# Check for required directories +check_directory() { + if [[ ! -d "$1" ]]; then + STATUS="error" + DETAILS+=("Directory $1 not found or not accessible") + return 1 + elif [[ ! -w "$1" ]]; then + STATUS="error" + DETAILS+=("Directory $1 not writable") + return 1 + fi + return 0 +} + +# Check for required files +check_file() { + if [[ ! -f "$1" ]]; then + STATUS="error" + DETAILS+=("File $1 not found or not accessible") + return 1 + elif [[ ! -r "$1" ]]; then + STATUS="error" + DETAILS+=("File $1 not readable") + return 1 + fi + return 0 +} + +# Check for required commands +check_command() { + if ! command -v "$1" &> /dev/null; then + STATUS="error" + DETAILS+=("Command $1 not found or not executable") + return 1 + fi + return 0 +} + +# Check API connectivity (if tokens are available) +check_api_connectivity() { + if [[ -n "$ACCESS_TOKEN" && -n "$API_URL" ]]; then + # Attempt a simple API call + local api_response + api_response=$(curl -s -f -H "Content-Type: application/json" \ + -H "Authorization: Bearer $ACCESS_TOKEN" \ + -H "trakt-api-version: 2" \ + -H "trakt-api-key: $API_KEY" \ + "${API_URL}/users/settings" 2>&1) + + if [[ $? 
-ne 0 ]]; then + STATUS="warning" + DETAILS+=("Cannot connect to Trakt API: $api_response") + return 1 + fi + else + STATUS="warning" + DETAILS+=("API credentials not configured") + return 1 + fi + return 0 +} + +# Run all checks +run_health_checks() { + # Check essential directories + check_directory "${DOSLOG:-/app/logs}" + check_directory "${DOSCOPY:-/app/copy}" + check_directory "${BACKUP_DIR:-/app/backup}" + check_directory "${CONFIG_DIR:-/app/config}" + + # Check essential files + check_file "/app/Export_Trakt_4_Letterboxd.sh" + check_file "/app/docker-entrypoint.sh" + + # Check essential commands + check_command "bash" + check_command "curl" + check_command "jq" + check_command "sed" + + # Check API connectivity + check_api_connectivity + + # Prepare response + local health_response + health_response=$(cat </dev/null || echo "${BASH_SOURCE[0]}")" )" && pwd )" +BASE_DIR="$(dirname "$SCRIPT_DIR")" # This should be the root directory of the project + +# Used for error message if language initialization fails +MSG_ERROR_MISSING_LANG_FILE="Error: Language file not found. Using English defaults." + +# Global variables +LANG_DIR="" +CURRENT_LANG="en" +AVAILABLE_LANGS=("en" "fr" "es" "de" "it") + +# Initialize the i18n system +init_i18n() { + local script_dir="$1" + local log_file="$2" + + # Set language directory path correctly + LANG_DIR="${BASE_DIR}/locales" + + echo "DEBUG: Language directory set to: $LANG_DIR" | tee -a "${log_file}" + + # Check if language directory exists + if [ ! -d "$LANG_DIR" ]; then + echo "Creating language directory: $LANG_DIR" | tee -a "${log_file}" + mkdir -p "$LANG_DIR" + fi + + # Load language from config or use default + if [ -n "$LANGUAGE" ]; then + set_language "$LANGUAGE" "$log_file" + else + # Try to detect system language + detect_system_language "$log_file" + fi + + # Load messages for the current language + load_language_messages "$log_file" + + echo "Internationalization initialized. Current language: $CURRENT_LANG" | tee -a "${log_file}" +} + +# Set language to use +set_language() { + local lang="$1" + local log_file="$2" + + # Check if the language is supported + local is_supported=0 + for supported_lang in "${AVAILABLE_LANGS[@]}"; do + if [ "$lang" == "$supported_lang" ]; then + is_supported=1 + break + fi + done + + if [ $is_supported -eq 1 ]; then + CURRENT_LANG="$lang" + echo "Language set to: $CURRENT_LANG" | tee -a "${log_file}" + else + echo "Warning: Language '$lang' is not supported. Using default (en)." 
| tee -a "${log_file}" + CURRENT_LANG="en" + fi +} + +# Detect system language +detect_system_language() { + local log_file="$1" + local system_lang="" + + # Try to get system language + if [ -n "$LANG" ]; then + system_lang="${LANG:0:2}" + elif [ -n "$LC_ALL" ]; then + system_lang="${LC_ALL:0:2}" + elif [ -n "$LC_MESSAGES" ]; then + system_lang="${LC_MESSAGES:0:2}" + fi + + echo "Detected system language: ${system_lang:-unknown}" | tee -a "${log_file}" + + # If we got a valid language and it's supported, use it + if [ -n "$system_lang" ]; then + set_language "$system_lang" "$log_file" + else + # Otherwise use default + CURRENT_LANG="en" + echo "Using default language: en" | tee -a "${log_file}" + fi +} + +# Load messages for the current language +load_language_messages() { + local log_file="$1" + local messages_file="${LANG_DIR}/${CURRENT_LANG}/LC_MESSAGES/messages.sh" + + echo "DEBUG: Looking for messages file at: $messages_file" | tee -a "${log_file}" + + # Start with loading default English messages + local default_messages_file="${LANG_DIR}/en/LC_MESSAGES/messages.sh" + if [ -f "$default_messages_file" ]; then + echo "DEBUG: Loading default English messages from: $default_messages_file" | tee -a "${log_file}" + source "$default_messages_file" + else + echo "Warning: Default English messages file not found at $default_messages_file. Using internal defaults." | tee -a "${log_file}" + load_default_messages + fi + + # If current language is not English, load the specific language file to override defaults + if [ "$CURRENT_LANG" != "en" ] && [ -f "$messages_file" ]; then + echo "DEBUG: Loading language specific messages from: $messages_file" | tee -a "${log_file}" + source "$messages_file" + echo "Loaded language messages from: $messages_file" | tee -a "${log_file}" + elif [ "$CURRENT_LANG" != "en" ]; then + echo "Warning: No messages file found for language '$CURRENT_LANG' at $messages_file. Using English defaults." | tee -a "${log_file}" + fi +} + +# Load default (English) messages if translation file is not found +load_default_messages() { + # Define default English messages + MSG_welcome="Welcome to Export Trakt 4 Letterboxd" + MSG_starting="Starting script" + MSG_script_execution_start="Script execution started" + MSG_processing_option="Processing option" + MSG_no_option="No option provided, using default" + MSG_retrieving_info="Retrieving information" + MSG_checking_dependencies="Checking required dependencies" + MSG_missing_dependencies="Some required dependencies are missing. Please install them before continuing." + MSG_all_dependencies_installed="All required dependencies are installed." 
+ MSG_environment_info="Environment information" + MSG_existing_csv_check="Existing CSV file check" + MSG_error="ERROR" + MSG_warning="WARNING" + MSG_success="SUCCESS" + MSG_script_complete="Script execution completed" + MSG_running_docker="Running in Docker container" + MSG_script_option="Script option" + MSG_none="none" + MSG_user="User" + MSG_working_directory="Working directory" + MSG_script_directory="Script directory" + MSG_copy_directory="Copy directory" + MSG_log_directory="Log directory" + MSG_backup_directory="Backup directory" + MSG_os_type="OS Type" + MSG_file_exists="File exists" + MSG_file_is_readable="File is readable" + MSG_file_is_writable="File is writable" + MSG_file_has_content="File has content" + MSG_file_exists_not="File not found" + MSG_directory_exists="Directory exists" + MSG_directory_permissions="Directory permissions" + MSG_created_backup_directory="Created backup directory" + MSG_backup_directory_exists="Backup directory exists" + MSG_backup_directory_writable="Backup directory is writable" + MSG_backup_directory_not_writable="WARNING: Backup directory is not writable. Check permissions." + MSG_language_set="Language set to" + MSG_running_in="running on" + MSG_auto_detected="auto-detected" +} + +# Get a translated message +get_message() { + local message_key="$1" + local default_message="$2" + local var_name="MSG_${message_key}" + + # If the variable exists, return its value + if [ -n "${!var_name}" ]; then + echo "${!var_name}" + else + # Otherwise return default message if provided + if [ -n "$default_message" ]; then + echo "$default_message" + else + # If no default message, return the key itself + echo "$message_key" + fi + fi +} + +# Translate a message (alias for get_message) +_() { + get_message "$@" +} + +# List available languages +list_languages() { + local log_file="$1" + + echo "Available languages:" | tee -a "${log_file}" + for lang in "${AVAILABLE_LANGS[@]}"; do + if [ "$lang" == "$CURRENT_LANG" ]; then + echo " - $lang (current)" | tee -a "${log_file}" + else + echo " - $lang" | tee -a "${log_file}" + fi + done +} + +# Create a new language file template +create_language_template() { + local lang="$1" + local log_file="$2" + + # Check if language code is valid + if [ -z "$lang" ] || [ ${#lang} -ne 2 ]; then + echo "Invalid language code. Please use a 2-letter ISO language code (e.g., 'en', 'fr')." | tee -a "${log_file}" + return 1 + fi + + # Create directory if it doesn't exist + local lang_dir="${LANG_DIR}/${lang}/LC_MESSAGES" + if [ ! -d "$lang_dir" ]; then + mkdir -p "$lang_dir" + echo "Created directory: $lang_dir" | tee -a "${log_file}" + fi + + # Create template file + local template_file="${lang_dir}/messages.sh" + if [ -f "$template_file" ]; then + echo "Warning: File already exists: $template_file" | tee -a "${log_file}" + read -p "Overwrite? (y/N): " confirm + if [ "$confirm" != "y" ] && [ "$confirm" != "Y" ]; then + echo "Aborted." 
| tee -a "${log_file}" + return 1 + fi + fi + + # Get the English template as the base + local en_template="${LANG_DIR}/en/LC_MESSAGES/messages.sh" + if [ -f "$en_template" ]; then + cp "$en_template" "$template_file" + echo "Created language template from English messages: $template_file" | tee -a "${log_file}" + else + # Create template content + echo "#!/bin/bash" > "$template_file" + echo "#" >> "$template_file" + echo "# Language: $lang" >> "$template_file" + echo "#" >> "$template_file" + echo "" >> "$template_file" + echo "# Define messages for $lang" >> "$template_file" + echo "# Variables must start with MSG_ to be recognized by the system" >> "$template_file" + echo "" >> "$template_file" + + # Add default messages + load_default_messages + + # Get a list of all message variables + local msg_vars=$(set | grep '^MSG_' | cut -d= -f1) + + # Add each message key with its English value for translation + for var in $msg_vars; do + # Get the message value + local value=${!var} + echo "$var=\"$value\"" >> "$template_file" + done + + echo "Created language template from default messages: $template_file" | tee -a "${log_file}" + fi + + return 0 +} \ No newline at end of file diff --git a/lib/main.sh b/lib/main.sh new file mode 100755 index 0000000..3971a66 --- /dev/null +++ b/lib/main.sh @@ -0,0 +1,246 @@ +#!/bin/bash +# +# Main module - Orchestrates the Trakt to Letterboxd export process +# + +# Import all required modules +import_modules() { + local script_dir="$1" + + # List of modules to import + modules=("config" "utils" "i18n" "trakt_api" "data_processing") + + for module in "${modules[@]}"; do + # First try the local path + if [ -f "${script_dir}/lib/${module}.sh" ]; then + source "${script_dir}/lib/${module}.sh" + # Then try the Docker path + elif [ -f "/app/lib/${module}.sh" ]; then + source "/app/lib/${module}.sh" + else + echo "ERROR: Required module not found: ${module}.sh" + exit 1 + fi + done +} + +# Initialize script environment +initialize_environment() { + local script_dir="$1" + local option="$2" + local log="$3" + + # Print debug information + echo "=========== DEBUG INFORMATION ===========" + echo "Script called with option: $option" + echo "Number of arguments: $#" + if [ -n "$option" ]; then + echo "Option value: '$option'" + else + echo "No option provided, using default" + fi + echo "=========================================" + + # Get sed command based on OS + SED_INPLACE=$(detect_os_sed "$log") + + # Load configuration + load_config "$script_dir" "$log" + + # Initialize internationalization + init_i18n "$script_dir" "$log" + + # Initialize temporary directory + init_temp_dir "$TEMP_DIR" "$log" + + # Ensure required directories exist + ensure_directories "$DOSLOG" "$DOSCOPY" "$log" + + # Log environment information + log_environment "$log" "$script_dir" "$DOSCOPY" "$DOSLOG" "$BACKUP_DIR" + + # Initialize backup directory + init_backup_dir "$BACKUP_DIR" "$log" + + # Check for existing CSV file + if [ -f "${DOSCOPY}/letterboxd_import.csv" ]; then + debug_file_info "${DOSCOPY}/letterboxd_import.csv" "$(_ "existing_csv_check")" "$log" + fi + + # Check for required dependencies + check_dependencies "$log" || exit 1 + + echo -e "$(_ "retrieving_info")" | tee -a "${log}" +} + +# Process command line arguments +process_arguments() { + local script_dir="$1" + local log="$2" + local option="$3" + + # Initialize environment with command line option + initialize_environment "$script_dir" "$option" "$log" + + # Process based on option + case "$option" in + "help"|"-h"|"--help") 
+ show_help "$log" + exit 0 + ;; + "normal"|"") + # Default option - normal export + export_trakt_history "$log" "$DOSCOPY" "$script_dir" + ;; + "update") + # Update export with new entries + echo -e "$(_ "update_export")" | tee -a "${log}" + update_export "$log" "$DOSCOPY" "$script_dir" + ;; + "backup") + # Backup Trakt data + echo -e "$(_ "backup_data")" | tee -a "${log}" + backup_trakt_data "$log" "$BACKUP_DIR" "$script_dir" + ;; + "setup") + # Setup Trakt API + echo -e "$(_ "setup_api")" | tee -a "${log}" + setup_trakt_api "$log" "$script_dir" + ;; + "clean") + # Clean temporary files + echo -e "$(_ "cleaning_files")" | tee -a "${log}" + clean_temp_files "$log" "$TEMP_DIR" "$script_dir" + ;; + *) + echo -e "$(_ "unknown_option") $option" | tee -a "${log}" + show_help "$log" + exit 1 + ;; + esac + + echo -e "$(_ "script_complete")" | tee -a "${log}" +} + +# Show help information +show_help() { + local log="$1" + + echo "$(_ "usage"): ./Export_Trakt_4_Letterboxd.sh [option]" | tee -a "${log}" + echo "" | tee -a "${log}" + echo "$(_ "options"):" | tee -a "${log}" + echo " help $(_ "show_help")" | tee -a "${log}" + echo " normal $(_ "normal_export") ($(_ "default"))" | tee -a "${log}" + echo " update $(_ "update_export")" | tee -a "${log}" + echo " backup $(_ "backup_data")" | tee -a "${log}" + echo " setup $(_ "setup_api")" | tee -a "${log}" + echo " clean $(_ "cleaning_files")" | tee -a "${log}" +} + +# Export Trakt history to CSV +export_trakt_history() { + local log="$1" + local output_dir="$2" + local script_dir="$3" + + echo -e "$(_ "exporting_history")" | tee -a "${log}" + + # Fetch watch history from Trakt API + fetch_watched_history "$log" "$TEMP_DIR" "$script_dir" + + # Process the data into CSV format + process_data_to_csv "$log" "$TEMP_DIR" "$output_dir" "$script_dir" + + echo -e "$(_ "export_complete")" | tee -a "${log}" + echo -e "$(_ "output_location"): ${output_dir}/letterboxd_import.csv" | tee -a "${log}" +} + +# Update export with new entries +update_export() { + local log="$1" + local output_dir="$2" + local script_dir="$3" + + echo -e "$(_ "updating_export")" | tee -a "${log}" + + # Check if previous export exists + if [ ! 
-f "${output_dir}/letterboxd_import.csv" ]; then + echo -e "$(_ "no_previous_export")" | tee -a "${log}" + echo -e "$(_ "running_full_export")" | tee -a "${log}" + export_trakt_history "$log" "$output_dir" "$script_dir" + return + fi + + # Backup existing export + local backup_file="${output_dir}/letterboxd_import_${DATE}.csv.bak" + cp "${output_dir}/letterboxd_import.csv" "$backup_file" + echo -e "$(_ "previous_export_backed_up"): $backup_file" | tee -a "${log}" + + # Fetch new history since last export + fetch_history_since_last_export "$log" "$TEMP_DIR" "$output_dir" "$script_dir" + + # Merge with existing export + merge_with_existing_export "$log" "$TEMP_DIR" "$output_dir" "$script_dir" + + echo -e "$(_ "update_complete")" | tee -a "${log}" + echo -e "$(_ "output_location"): ${output_dir}/letterboxd_import.csv" | tee -a "${log}" +} + +# Backup Trakt data +backup_trakt_data() { + local log="$1" + local backup_dir="$2" + local script_dir="$3" + + echo -e "$(_ "backup_started")" | tee -a "${log}" + + # Ensure backup directory exists + mkdir -p "$backup_dir" + + # Backup different data types + backup_watched_history "$log" "$backup_dir" "$script_dir" + backup_ratings "$log" "$backup_dir" "$script_dir" + backup_watchlist "$log" "$backup_dir" "$script_dir" + backup_lists "$log" "$backup_dir" "$script_dir" + + echo -e "$(_ "backup_complete")" | tee -a "${log}" + echo -e "$(_ "backup_location"): $backup_dir" | tee -a "${log}" +} + +# Setup Trakt API +setup_trakt_api() { + local log="$1" + local script_dir="$2" + + echo -e "$(_ "setup_started")" | tee -a "${log}" + + # Run the setup script + "$script_dir/setup_trakt.sh" + + echo -e "$(_ "setup_complete")" | tee -a "${log}" +} + +# Clean temporary files +clean_temp_files() { + local log="$1" + local temp_dir="$2" + local script_dir="$3" + + echo -e "$(_ "cleaning_started")" | tee -a "${log}" + + # Clean temporary directory + if [ -d "$temp_dir" ]; then + rm -rf "${temp_dir:?}/"* 2>/dev/null + echo -e "$(_ "temp_dir_cleaned"): $temp_dir" | tee -a "${log}" + else + echo -e "$(_ "temp_dir_not_found"): $temp_dir" | tee -a "${log}" + fi + + # Clean old log files (older than 30 days) + if [ -d "$DOSLOG" ]; then + find "$DOSLOG" -name "*.log" -type f -mtime +30 -delete 2>/dev/null + echo -e "$(_ "old_logs_cleaned")" | tee -a "${log}" + fi + + echo -e "$(_ "cleaning_complete")" | tee -a "${log}" +} \ No newline at end of file diff --git a/lib/trakt_api.sh b/lib/trakt_api.sh new file mode 100755 index 0000000..de8bce6 --- /dev/null +++ b/lib/trakt_api.sh @@ -0,0 +1,313 @@ +#!/bin/bash +# +# Trakt API interaction functions +# + +# Refresh access token if expired +refresh_access_token() { + local refresh_token="$1" + local api_key="$2" + local api_secret="$3" + local redirect_uri="$4" + local config_file="$5" + local sed_inplace="$6" + local log_file="$7" + + echo "🔄 Refreshing Trakt token..." 
| tee -a "${log_file}" + echo " - Using refresh token: ${refresh_token:0:5}...${refresh_token: -5}" | tee -a "${log_file}" + echo " - API key: ${api_key:0:5}...${api_key: -5}" | tee -a "${log_file}" + + local response=$(curl -s -X POST "https://api.trakt.tv/oauth/token" \ + -H "Content-Type: application/json" -v \ + -d "{ + \"refresh_token\": \"${refresh_token}\", + \"client_id\": \"${api_key}\", + \"client_secret\": \"${api_secret}\", + \"redirect_uri\": \"${redirect_uri}\", + \"grant_type\": \"refresh_token\" + }") + + # Debug response (without exposing sensitive data) + echo " - Response received: $(if [ -n "$response" ]; then echo "✅"; else echo "❌ (empty)"; fi)" | tee -a "${log_file}" + + local new_access_token=$(echo "$response" | jq -r '.access_token') + local new_refresh_token=$(echo "$response" | jq -r '.refresh_token') + + if [[ "$new_access_token" != "null" && "$new_refresh_token" != "null" ]]; then + echo "✅ Token refreshed successfully." | tee -a "${log_file}" + echo " - New access token: ${new_access_token:0:5}...${new_access_token: -5}" | tee -a "${log_file}" + echo " - New refresh token: ${new_refresh_token:0:5}...${new_refresh_token: -5}" | tee -a "${log_file}" + + # Determine which config file to update + if [ ! -f "$config_file" ]; then + echo " - Config file not found: $config_file" | tee -a "${log_file}" + return 1 + fi + + echo " - Updating config file: $config_file" | tee -a "${log_file}" + + # Check if config file exists and is writable + if [ -f "$config_file" ]; then + if [ -w "$config_file" ]; then + echo " - Config file is writable: ✅" | tee -a "${log_file}" + else + echo " - Config file is writable: ❌ - Permissions: $(ls -la "$config_file" | awk '{print $1}')" | tee -a "${log_file}" + return 1 + fi + else + echo " - Config file exists: ❌ (not found)" | tee -a "${log_file}" + return 1 + fi + + $sed_inplace "s|ACCESS_TOKEN=.*|ACCESS_TOKEN=\"$new_access_token\"|" "$config_file" + $sed_inplace "s|REFRESH_TOKEN=.*|REFRESH_TOKEN=\"$new_refresh_token\"|" "$config_file" + + echo " - Config file updated: $(if [ $? -eq 0 ]; then echo "✅"; else echo "❌"; fi)" | tee -a "${log_file}" + + # Return the new tokens as a string + echo "${new_access_token}:${new_refresh_token}" + return 0 + else + echo "❌ Error refreshing token. Check your configuration!" | tee -a "${log_file}" + echo " - Response: $response" | tee -a "${log_file}" + echo " - Make sure your API credentials are correct and try again." | tee -a "${log_file}" + return 1 + fi +} + +# Check if the token is valid +check_token_validity() { + local api_url="$1" + local api_key="$2" + local access_token="$3" + local log_file="$4" + + echo "🔒 Checking token validity..." 
| tee -a "${log_file}" + + local response=$(curl -s -X GET "${api_url}/users/me/history/movies" \ + -H "Content-Type: application/json" \ + -H "trakt-api-key: ${api_key}" \ + -H "trakt-api-version: 2" \ + -H "Authorization: Bearer ${access_token}") + + if echo "$response" | grep -q "invalid_grant"; then + echo "⚠️ Token expired or invalid" | tee -a "${log_file}" + return 1 + else + echo "✅ Token is valid" | tee -a "${log_file}" + return 0 + fi +} + +# Get the latest backup directory +get_latest_backup_dir() { + local base_dir="$1" + local log_file="$2" + + echo "🔍 Using provided backup directory: $base_dir" | tee -a "${log_file}" + + # Just return the base_dir since it's already a timestamped directory created in run_export + echo "$base_dir" +} + +# Fetch data from Trakt API +fetch_trakt_data() { + local api_url="$1" + local api_key="$2" + local access_token="$3" + local endpoint="$4" + local output_file="$5" + local username="$6" + local log_file="$7" + + # Check if tokens are defined + if [ -z "$access_token" ] || [ "$access_token" = '""' ] || [ "$access_token" = "" ]; then + echo -e "\e[31mERROR: ACCESS_TOKEN not defined. Run the setup_trakt.sh script first to get a token.\e[0m" | tee -a "${log_file}" + echo -e "Command: ./setup_trakt.sh" | tee -a "${log_file}" + return 1 + fi + + # Create directory for output file if it doesn't exist + mkdir -p "$(dirname "$output_file")" + + echo "📥 Requesting data from: ${api_url}/users/me/${endpoint}" | tee -a "${log_file}" + echo "🔑 Using access token: ${access_token:0:5}...${access_token: -5}" | tee -a "${log_file}" + echo "💾 Saving to: ${output_file}" | tee -a "${log_file}" + + # Set up initial pagination parameters + local page=1 + local limit=50000 # Maximum allowed by Trakt API + # No safety limit (will use total_pages from API response) + local total_items=0 + local total_pages=0 + local temp_file="${output_file}.temp" + local success=false + local max_retries=3 + local retry_count=0 + + # Initialize an empty array in our temp file + echo "[]" > "$temp_file" + + # Make initial request to get total page count from headers + local initial_response_headers=$(curl -s -I -X GET "${api_url}/users/me/${endpoint}?page=1&limit=${limit}" \ + -H "Content-Type: application/json" \ + -H "trakt-api-key: ${api_key}" \ + -H "trakt-api-version: 2" \ + -H "Authorization: Bearer ${access_token}") + + # Try to extract total page count from X-Pagination-Page-Count header + if [[ "$initial_response_headers" == *"X-Pagination-Page-Count:"* ]]; then + total_pages=$(echo "$initial_response_headers" | grep -i "X-Pagination-Page-Count:" | awk '{print $2}' | tr -d '\r') + local total_count=$(echo "$initial_response_headers" | grep -i "X-Pagination-Item-Count:" | awk '{print $2}' | tr -d '\r' || echo "?") + + echo "📚 PAGES ESTIMATION FOR $endpoint:" | tee -a "${log_file}" + echo " ┌─────────────────────────────────────────────┐" | tee -a "${log_file}" + echo " │ Total pages: $total_pages (limit: $limit items/page) │" | tee -a "${log_file}" + echo " │ Total items: $total_count │" | tee -a "${log_file}" + + # Calculate estimated time (2s per page + 1s retry buffer) + if [[ "$total_pages" =~ ^[0-9]+$ ]]; then + local estimated_seconds=$((total_pages * 3)) + local estimated_minutes=$((estimated_seconds / 60)) + local remaining_seconds=$((estimated_seconds % 60)) + echo " │ Est. 
time: ~${estimated_minutes}m ${remaining_seconds}s                │" | tee -a "${log_file}"
+        fi
+
+        echo "   └─────────────────────────────────────────────┘" | tee -a "${log_file}"
+    else
+        echo "⚠️ Could not determine total page count from API response" | tee -a "${log_file}"
+        echo "⚠️ Will fetch pages until no more data is returned" | tee -a "${log_file}"
+        # Set a large default value to effectively remove the limit
+        total_pages=1000000
+    fi
+
+    # Make paginated API requests until we get all data
+    while [ $page -le $total_pages ]; do
+        retry_count=0
+        local page_success=false
+
+        # Calculate and display progress percentage
+        if [[ "$total_pages" =~ ^[0-9]+$ ]] && [ $total_pages -gt 0 ]; then
+            local progress_pct=$((page * 100 / total_pages))
+            local progress_bar=""
+            local bar_width=20
+            local filled_width=$((progress_pct * bar_width / 100))
+            local empty_width=$((bar_width - filled_width))
+
+            # Create the progress bar
+            progress_bar="["
+            for ((i=0; i<filled_width; i++)); do progress_bar+="="; done
+            for ((i=0; i<empty_width; i++)); do progress_bar+=" "; done
+            progress_bar+="]"
+
+            # Show progress for this page
+            echo "📥 Fetching page $page/$total_pages $progress_bar ${progress_pct}%" | tee -a "${log_file}"
+        fi
+
+        # Retry loop for the current page
+        while [ $retry_count -lt $max_retries ] && [ "$page_success" = "false" ]; do
+            # Request this page of data from the API
+            local response=$(curl -s -X GET "${api_url}/users/me/${endpoint}?page=${page}&limit=${limit}" \
+                -H "Content-Type: application/json" \
+                -H "trakt-api-key: ${api_key}" \
+                -H "trakt-api-version: 2" \
+                -H "Authorization: Bearer ${access_token}")
+
+            # Check that the response is valid JSON and contains items
+            if echo "$response" | jq empty 2>/dev/null && [ "$(echo "$response" | jq 'length')" -gt 0 ]; then
+                # Save the current items and merge with previous pages
+                echo "$response" > "${temp_file}.page${page}"
+
+                # Merge with existing data
+                jq -s 'add' "$temp_file" "${temp_file}.page${page}" > "${temp_file}.new"
+                mv "${temp_file}.new" "$temp_file"
+                rm "${temp_file}.page${page}"
+
+                # Get item count for this page
+                local items_count=$(echo "$response" | jq 'length')
+                total_items=$((total_items + items_count))
+                echo "✅ Page $page: Retrieved $items_count items (running total: $total_items)" | tee -a "${log_file}"
+
+                # If fewer items than the limit, we've reached the end
+                if [ $items_count -lt $limit ]; then
+                    echo "🏁 Reached end of data for endpoint $endpoint (fewer items than limit)" | tee -a "${log_file}"
+                    success=true
+                    break
+                fi
+
+                page_success=true
+                page=$((page + 1))
+            elif [ $retry_count -lt $((max_retries - 1)) ]; then
+                retry_count=$((retry_count + 1))
+                echo "⚠️ Retry $retry_count for page $page (endpoint: $endpoint)" | tee -a "${log_file}"
+                sleep 2 # Wait before retrying
+            else
+                echo -e "\e[33mWARNING: Failed to retrieve page $page for endpoint $endpoint after $max_retries attempts.\e[0m" | tee -a "${log_file}"
+                # If we got at least one page successfully, consider it a partial success
+                if [ $total_items -gt 0 ]; then
+                    success=true
+                    echo "⚠️ Continuing with partial data ($total_items items)" | tee -a "${log_file}"
+                    break
+                else
+                    echo -e "\e[31mERROR: Failed to retrieve any data for endpoint $endpoint.\e[0m" | tee -a "${log_file}"
+                    return 1
+                fi
+            fi
+        done
+
+        # If we've exhausted all pages or reached the end, break
+        if [ "$page_success" != "true" ] || [ "$success" = "true" ]; then
+            break
+        fi
+    done
+
+    # If we got here with data, move the temporary file to the final location
+    if [ "$success" = "true" ] || [ $total_items -gt 0 ]; then
+        mv "$temp_file" "$output_file"
+        echo "📊 Successfully saved $total_items items for endpoint $endpoint" | tee -a "${log_file}"
+        echo -e "\e[32m${username}/${endpoint}\e[0m Retrieved successfully ($total_items items in ${page} pages)" | tee -a "${log_file}"
+        return 0
+    else
+        echo -e "\e[31mERROR: Failed to retrieve data for endpoint ${endpoint}.\e[0m" | tee -a "${log_file}"
+        # If we have a temporary file but no data, clean up
+        rm -f "$temp_file"
+        return 1
+    fi
+}
+
+# Get endpoints based on mode
+get_endpoints_for_mode() {
+    local mode="$1"
+    local log_file="$2"
+
+    case "$mode" in
+        "complete")
+            echo -e "Complete Mode activated" | tee -a "${log_file}"
+            echo "watchlist/movies watchlist/shows watchlist/episodes 
watchlist/seasons ratings/movies ratings/shows ratings/episodes ratings/seasons collection/movies collection/shows watched/movies watched/shows history/movies history/shows" + ;; + "initial") + echo -e "Initial Mode activated" | tee -a "${log_file}" + echo "history/movies ratings/movies watched/movies" + ;; + *) + echo -e "Normal Mode activated" | tee -a "${log_file}" + echo "history/movies ratings/movies ratings/episodes history/movies history/shows history/episodes watchlist/movies watchlist/shows" + ;; + esac +} \ No newline at end of file diff --git a/lib/utils.sh b/lib/utils.sh new file mode 100755 index 0000000..8fcf471 --- /dev/null +++ b/lib/utils.sh @@ -0,0 +1,117 @@ +#!/bin/bash +# +# Utility and debugging functions +# + +# Debug messaging function +debug_msg() { + local message="$1" + local log_file="$2" + + echo -e "DEBUG: $message" | tee -a "${log_file}" +} + +# File manipulation debug function +debug_file_info() { + local file="$1" + local message="$2" + local log_file="$3" + + echo "📄 $message:" | tee -a "${log_file}" + if [ -f "$file" ]; then + echo " - $(_ "file_exists"): ✅" | tee -a "${log_file}" + echo " - File size: $(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo "unknown") bytes" | tee -a "${log_file}" + echo " - File permissions: $(ls -la "$file" | awk '{print $1}')" | tee -a "${log_file}" + echo " - Owner: $(ls -la "$file" | awk '{print $3":"$4}')" | tee -a "${log_file}" + + # Check if file is readable + if [ -r "$file" ]; then + echo " - $(_ "file_is_readable"): ✅" | tee -a "${log_file}" + else + echo " - $(_ "file_is_readable"): ❌" | tee -a "${log_file}" + fi + + # Check if file is writable + if [ -w "$file" ]; then + echo " - $(_ "file_is_writable"): ✅" | tee -a "${log_file}" + else + echo " - $(_ "file_is_writable"): ❌" | tee -a "${log_file}" + fi + + # Check if file has content + if [ -s "$file" ]; then + echo " - $(_ "file_has_content"): ✅" | tee -a "${log_file}" + echo " - First line: $(head -n 1 "$file" 2>/dev/null || echo "Cannot read file")" | tee -a "${log_file}" + echo " - Line count: $(wc -l < "$file" 2>/dev/null || echo "Cannot count lines")" | tee -a "${log_file}" + else + echo " - $(_ "file_has_content"): ❌ (empty file)" | tee -a "${log_file}" + fi + else + echo " - $(_ "file_exists"): ❌ ($(_ "file_exists_not"))" | tee -a "${log_file}" + echo " - $(_ "directory_exists"): $(if [ -d "$(dirname "$file")" ]; then echo "✅"; else echo "❌"; fi)" | tee -a "${log_file}" + echo " - $(_ "directory_permissions"): $(ls -la "$(dirname "$file")" 2>/dev/null | head -n 1 | awk '{print $1}' || echo "Cannot access directory")" | tee -a "${log_file}" + fi + echo "-----------------------------------" | tee -a "${log_file}" +} + +# Check if command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Check for required dependencies +check_dependencies() { + local log_file="$1" + local missing=0 + + echo "🔍 $(_ "checking_dependencies"):" | tee -a "${log_file}" + + for cmd in curl jq sed awk; do + if command_exists "$cmd"; then + echo " - $cmd: ✅" | tee -a "${log_file}" + else + echo " - $cmd: ❌ (missing)" | tee -a "${log_file}" + missing=1 + fi + done + + if [ $missing -eq 1 ]; then + echo "❌ $(_ "missing_dependencies")" | tee -a "${log_file}" + return 1 + else + echo "✅ $(_ "all_dependencies_installed")" | tee -a "${log_file}" + return 0 + fi +} + +# Print progress bar +progress_bar() { + local current="$1" + local total="$2" + local prefix="$3" + local log_file="$4" + local width=50 + local percentage=$((current * 100 / total)) + 
local completed=$((width * current / total)) + local remaining=$((width - completed)) + + printf "\r%s [%s%s] %d%%" "$prefix" "$(printf "%${completed}s" | tr ' ' '=')" "$(printf "%${remaining}s" | tr ' ' ' ')" "$percentage" + + if [ "$current" -eq "$total" ]; then + echo "" + echo "$prefix completed (100%)" | tee -a "${log_file}" + fi +} + +# Error handling function +handle_error() { + local error_message="$1" + local error_code="$2" + local log_file="$3" + + echo "❌ $(_ "error"): $error_message" | tee -a "${log_file}" + + if [ -n "$error_code" ]; then + exit "$error_code" + fi +} \ No newline at end of file diff --git a/locales/de/LC_MESSAGES/messages.sh b/locales/de/LC_MESSAGES/messages.sh new file mode 100755 index 0000000..7590f59 --- /dev/null +++ b/locales/de/LC_MESSAGES/messages.sh @@ -0,0 +1,138 @@ +#!/bin/bash +# +# Language: de +# + +# Define messages for de +# Variables must start with MSG_ to be recognized by the system + +MSG_CONFIG_CREATED="Konfigurationsdatei erstellt" +MSG_CONFIG_ERROR="Fehler in der Konfigurationsdatei" +MSG_CONFIG_LOADED="Konfiguration geladen" +MSG_CONFIG_NOT_FOUND="Konfigurationsdatei nicht gefunden" +MSG_CONFIG_SAVED="Konfiguration gespeichert" +MSG_ERROR="Fehler" +MSG_ERROR_MISSING_LANG_FILE="Fehler: Sprachdatei nicht gefunden. Englische Standardwerte werden verwendet." +MSG_EXPORT_FAILED="Exportvorgang fehlgeschlagen" +MSG_EXPORT_FILE_CREATED="Exportdatei erstellt: %s" +MSG_EXPORT_FINISHED="Exportvorgang abgeschlossen" +MSG_EXPORT_NO_DATA="Keine Daten zum Exportieren" +MSG_EXPORT_STARTING="Exportvorgang wird gestartet" +MSG_FAILED="Fehlgeschlagen" +MSG_FILE_CREATED="Datei erstellt: %s" +MSG_FILE_DELETED="Datei gelöscht: %s" +MSG_FILE_NOT_FOUND="Datei nicht gefunden: %s" +MSG_FILE_PERMISSION_DENIED="Zugriff auf Datei verweigert: %s" +MSG_FILE_UPDATED="Datei aktualisiert: %s" +MSG_GOODBYE="Auf Wiedersehen" +MSG_HELLO="Hallo" +MSG_INFO="Information" +MSG_SCRIPT_ERROR="Bei der Ausführung des Skripts ist ein Fehler aufgetreten" +MSG_SCRIPT_FINISHED="Export Trakt 4 Letterboxd Skript beendet" +MSG_SCRIPT_INTERRUPTED="Skript vom Benutzer unterbrochen" +MSG_SCRIPT_STARTING="Export Trakt 4 Letterboxd Skript wird gestartet" +MSG_SUCCESS="Erfolgreich" +MSG_TRAKT_API_ERROR="Fehler bei der Verbindung zur Trakt API" +MSG_TRAKT_API_RATE_LIMIT="Trakt API-Ratenlimit erreicht, warte..." +MSG_TRAKT_AUTH_FAILED="Trakt-Authentifizierung fehlgeschlagen" +MSG_TRAKT_AUTH_REQUIRED="Trakt-Authentifizierung erforderlich" +MSG_TRAKT_AUTH_SUCCESS="Trakt-Authentifizierung erfolgreich" +MSG_USER_CONFIRM="Möchten Sie fortfahren? 
(j/N)" +MSG_USER_INPUT_REQUIRED="Bitte geben Sie eine Eingabe ein" +MSG_USER_INVALID_INPUT="Ungültige Eingabe, bitte versuchen Sie es erneut" +MSG_WARNING="Warnung" +MSG_WELCOME="Willkommen bei Export Trakt 4 Letterboxd" +MSG_DONE="Erledigt" +MSG_ABORT="Abbrechen" +MSG_CONTINUE="Fortfahren" +MSG_YES="Ja" +MSG_NO="Nein" +MSG_CONFIRM="Bestätigen" +MSG_CANCEL="Abbrechen" +MSG_EXIT="Beenden" +MSG_HELP="Hilfe" +MSG_INVALID_OPTION="Ungültige Option" +MSG_PROCESSING="Verarbeitung läuft" +MSG_PLEASE_WAIT="Bitte warten" +MSG_SCRIPT_EXECUTION_START="Skriptausführung gestartet" +MSG_SCRIPT_EXECUTION_END="Skriptausführung beendet" +MSG_SCRIPT_OPTION="Skriptoption" +MSG_NONE="keine" +MSG_STARTING="Starten" +MSG_RUNNING_IN="läuft auf" +MSG_LANGUAGE_SET="Sprache eingestellt auf" +MSG_AUTO_DETECTED="automatisch erkannt" +MSG_RUNNING_DOCKER="Läuft in Docker-Container" +MSG_API_REQUEST="API-Anfrage" +MSG_API_RESPONSE="API-Antwort" +MSG_API_ERROR="API-Fehler" +MSG_API_RETRY="Wiederholung" +MSG_API_LIMIT="API-Limit erreicht" +MSG_API_WAIT="Warten vor der nächsten Anfrage" +MSG_API_AUTH_REQUIRED="Authentifizierung erforderlich" +MSG_API_AUTH_SUCCESS="Authentifizierung erfolgreich" +MSG_API_AUTH_FAILURE="Authentifizierung fehlgeschlagen" +MSG_EXPORT_START="Export wird gestartet" +MSG_EXPORT_COMPLETE="Export abgeschlossen" +MSG_EXPORT_PROCESSING="Exportdaten werden verarbeitet" +MSG_EXPORT_FORMATTING="Exportdaten werden formatiert" +MSG_EXPORT_GENERATING="Exportdatei wird generiert" +MSG_EXPORT_SAVING="Exportdatei wird gespeichert" +MSG_EXPORT_SUMMARY="Exportzusammenfassung" +MSG_USER_INPUT="Benutzereingabe" +MSG_USER_SELECTION="Benutzerauswahl" +MSG_USER_CONFIRMATION="Benutzerbestätigung" +MSG_USER_PROMPT="Benutzeraufforderung" +MSG_CONFIG_LOADING="Konfiguration wird geladen" +MSG_CONFIG_SAVING="Konfiguration wird gespeichert" +MSG_CONFIG_MISSING="Konfiguration fehlt" +MSG_CONFIG_UPDATED="Konfiguration aktualisiert" +MSG_CONFIG_DEFAULT="Standardkonfiguration" +MSG_FILE_READ_ERROR="Fehler beim Lesen der Datei" +MSG_FILE_WRITE_ERROR="Fehler beim Schreiben der Datei" +MSG_DIRECTORY_CREATED="Verzeichnis erstellt" +MSG_DIRECTORY_NOT_FOUND="Verzeichnis nicht gefunden" +MSG_TRANSLATION_LOADED="Übersetzung geladen" +MSG_TRANSLATION_MISSING="Übersetzung fehlt" +MSG_TRANSLATION_ERROR="Übersetzungsfehler" +MSG_TRANSLATION_UPDATED="Übersetzung aktualisiert" + +# Legacy keys for backward compatibility +MSG_all_dependencies_installed="Alle erforderlichen Abhängigkeiten sind installiert." +MSG_api_key_check="API-Schlüsselprüfung" +MSG_api_key_found="API-Schlüssel gefunden" +MSG_api_key_not_found="API-Schlüssel nicht gefunden" +MSG_api_secret_check="API-Secret-Prüfung" +MSG_api_secret_found="API-Secret gefunden" +MSG_api_secret_not_found="API-Secret nicht gefunden" +MSG_access_token_check="Zugriffstoken-Prüfung" +MSG_access_token_found="Zugriffstoken gefunden" +MSG_access_token_not_found="Zugriffstoken nicht gefunden" +MSG_refresh_token_check="Aktualisierungstoken-Prüfung" +MSG_refresh_token_found="Aktualisierungstoken gefunden" +MSG_refresh_token_not_found="Aktualisierungstoken nicht gefunden" +MSG_backup_directory="Backup-Verzeichnis" +MSG_backup_directory_exists="Backup-Verzeichnis existiert" +MSG_backup_directory_not_writable="WARNUNG: Backup-Verzeichnis ist nicht beschreibbar. Überprüfen Sie die Berechtigungen." 
+MSG_backup_directory_writable="Backup-Verzeichnis ist beschreibbar" +MSG_checking_dependencies="Erforderliche Abhängigkeiten werden überprüft" +MSG_copy_directory="Kopierverzeichnis" +MSG_created_backup_directory="Backup-Verzeichnis erstellt" +MSG_directory_exists="Verzeichnis existiert" +MSG_directory_permissions="Verzeichnisberechtigungen" +MSG_environment_info="Umgebungsinformationen" +MSG_existing_csv_check="Überprüfung der vorhandenen CSV-Datei" +MSG_file_exists="Datei existiert" +MSG_file_exists_not="Datei nicht gefunden" +MSG_file_has_content="Datei hat Inhalt" +MSG_file_is_readable="Datei ist lesbar" +MSG_file_is_writable="Datei ist beschreibbar" +MSG_log_directory="Protokollverzeichnis" +MSG_missing_dependencies="Einige erforderliche Abhängigkeiten fehlen. Bitte installieren Sie diese vor dem Fortfahren." +MSG_no_option="Keine Option angegeben, verwende Standardwert" +MSG_os_type="Betriebssystemtyp" +MSG_retrieving_info="Informationen werden abgerufen" +MSG_script_complete="Skriptausführung erfolgreich abgeschlossen" +MSG_script_directory="Skriptverzeichnis" +MSG_user="Benutzer" +MSG_working_directory="Arbeitsverzeichnis" diff --git a/locales/en.json b/locales/en.json deleted file mode 100644 index 53d3380..0000000 --- a/locales/en.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "app": { - "name": "Export Trakt 4 Letterboxd", - "description": "Export your Trakt.tv history to Letterboxd format" - }, - "startup": { - "loading_config": "Loading configuration from {{.path}}", - "starting": "Starting Export Trakt 4 Letterboxd", - "config_loaded": "Configuration loaded successfully" - }, - "export": { - "retrieving_movies": "Retrieving movies from Trakt.tv", - "movies_retrieved": "Retrieved {{.count}} movies from Trakt.tv", - "exporting_movies": "Exporting movies to Letterboxd format", - "export_complete": "Successfully exported {{.count}} movies to {{.path}}", - "export_failed": "Failed to export movies: {{.error}}", - "retrieving_watched_movies": "Retrieving watched movies from Trakt.tv", - "exporting_watched_movies": "Exporting watched movies to Letterboxd format", - "retrieving_collection": "Retrieving collection movies from Trakt.tv", - "collection_retrieved": "Retrieved {{.count}} movies from your Trakt.tv collection", - "exporting_collection": "Exporting collection movies to Letterboxd format", - "collection_export_complete": "Successfully exported {{.count}} collection movies to {{.path}}", - "retrieving_watched_shows": "Retrieving watched shows from Trakt.tv", - "shows_retrieved": "Retrieved {{.shows}} shows with {{.episodes}} episodes from Trakt.tv", - "exporting_shows": "Exporting TV shows to CSV format", - "shows_export_complete": "Successfully exported {{.shows}} shows with {{.episodes}} episodes to {{.path}}", - "retrieving_ratings": "Retrieving movie ratings from Trakt.tv", - "ratings_retrieved": "Retrieved {{.count}} movie ratings from Trakt.tv", - "exporting_ratings": "Exporting movie ratings to Letterboxd format", - "ratings_export_complete": "Successfully exported {{.count}} movie ratings to {{.path}}", - "retrieving_watchlist": "Retrieving movie watchlist from Trakt.tv", - "watchlist_retrieved": "Retrieved {{.count}} movies from your Trakt.tv watchlist", - "exporting_watchlist": "Exporting movie watchlist to Letterboxd format", - "watchlist_export_complete": "Successfully exported {{.count}} watchlist movies to {{.path}}" - }, - "errors": { - "config_load_failed": "Failed to load configuration: {{.error}}", - "api_request_failed": "API request failed: {{.error}}", - 
"export_dir_create_failed": "Failed to create export directory: {{.error}}", - "file_create_failed": "Failed to create file: {{.error}}", - "log_file_failed": "Failed to set log file: {{.error}}", - "translator_failed": "Failed to initialize translator: {{.error}}", - "invalid_export_type": "Invalid export type: {{.type}}", - "api_url_parse_error": "Failed to parse API URL: {{.error}}", - "api_response_parse_failed": "Failed to parse API response: {{.error}}" - } -} diff --git a/locales/en/LC_MESSAGES/messages.sh b/locales/en/LC_MESSAGES/messages.sh new file mode 100755 index 0000000..2a047a3 --- /dev/null +++ b/locales/en/LC_MESSAGES/messages.sh @@ -0,0 +1,158 @@ +#!/bin/bash +# +# Language: en +# + +# Define messages for en +# Variables must start with MSG_ to be recognized by the system + +# General messages +MSG_HELLO="Hello" +MSG_WELCOME="Welcome to Export Trakt 4 Letterboxd" +MSG_GOODBYE="Goodbye" +MSG_ERROR="Error" +MSG_WARNING="Warning" +MSG_INFO="Information" +MSG_SUCCESS="Success" +MSG_FAILED="Failed" +MSG_DONE="Done" +MSG_ABORT="Abort" +MSG_CONTINUE="Continue" +MSG_YES="Yes" +MSG_NO="No" +MSG_CONFIRM="Confirm" +MSG_CANCEL="Cancel" +MSG_EXIT="Exit" +MSG_HELP="Help" +MSG_INVALID_OPTION="Invalid option" +MSG_PROCESSING="Processing" +MSG_PLEASE_WAIT="Please wait" + +# Script messages +MSG_SCRIPT_STARTING="Starting Export Trakt 4 Letterboxd script" +MSG_SCRIPT_FINISHED="Export Trakt 4 Letterboxd script finished" +MSG_SCRIPT_INTERRUPTED="Script interrupted by user" +MSG_SCRIPT_ERROR="An error occurred while running the script" +MSG_SCRIPT_EXECUTION_START="Script execution started" +MSG_SCRIPT_EXECUTION_END="Script execution ended" +MSG_SCRIPT_OPTION="Script option" +MSG_NONE="none" +MSG_STARTING="Starting" +MSG_RUNNING_IN="running on" +MSG_LANGUAGE_SET="Language set to" +MSG_AUTO_DETECTED="auto-detected" +MSG_RUNNING_DOCKER="Running in Docker container" +MSG_SCRIPT_COMPLETE="Script execution completed successfully" + +# Trakt API messages +MSG_TRAKT_AUTH_REQUIRED="Trakt authentication required" +MSG_TRAKT_AUTH_SUCCESS="Trakt authentication successful" +MSG_TRAKT_AUTH_FAILED="Trakt authentication failed" +MSG_TRAKT_API_ERROR="Error connecting to Trakt API" +MSG_TRAKT_API_RATE_LIMIT="Trakt API rate limit reached, waiting..." +MSG_API_REQUEST="API request" +MSG_API_RESPONSE="API response" +MSG_API_ERROR="API error" +MSG_API_RETRY="Retry" +MSG_API_LIMIT="API limit reached" +MSG_API_WAIT="Waiting before next request" +MSG_API_AUTH_REQUIRED="Authentication required" +MSG_API_AUTH_SUCCESS="Authentication successful" +MSG_API_AUTH_FAILURE="Authentication failed" + +# Export messages +MSG_EXPORT_STARTING="Starting export process" +MSG_EXPORT_FINISHED="Export process completed" +MSG_EXPORT_FAILED="Export process failed" +MSG_EXPORT_NO_DATA="No data to export" +MSG_EXPORT_FILE_CREATED="Export file created: %s" +MSG_EXPORT_START="Starting export" +MSG_EXPORT_COMPLETE="Export completed" +MSG_EXPORT_PROCESSING="Processing export data" +MSG_EXPORT_FORMATTING="Formatting export data" +MSG_EXPORT_GENERATING="Generating export file" +MSG_EXPORT_SAVING="Saving export file" +MSG_EXPORT_SUMMARY="Export summary" + +# User messages +MSG_USER_INPUT_REQUIRED="Please provide input" +MSG_USER_CONFIRM="Do you want to continue? 
(y/N)" +MSG_USER_INVALID_INPUT="Invalid input, please try again" +MSG_USER_INPUT="User input" +MSG_USER_SELECTION="User selection" +MSG_USER_CONFIRMATION="User confirmation" +MSG_USER_PROMPT="User prompt" +MSG_USER="User" + +# Configuration messages +MSG_CONFIG_LOADED="Configuration loaded" +MSG_CONFIG_SAVED="Configuration saved" +MSG_CONFIG_ERROR="Error in configuration file" +MSG_CONFIG_NOT_FOUND="Configuration file not found" +MSG_CONFIG_CREATED="Configuration file created" +MSG_CONFIG_LOADING="Loading configuration" +MSG_CONFIG_SAVING="Saving configuration" +MSG_CONFIG_MISSING="Configuration missing" +MSG_CONFIG_UPDATED="Configuration updated" +MSG_CONFIG_DEFAULT="Default configuration" + +# File operation messages +MSG_FILE_NOT_FOUND="File not found: %s" +MSG_FILE_CREATED="File created: %s" +MSG_FILE_DELETED="File deleted: %s" +MSG_FILE_UPDATED="File updated: %s" +MSG_FILE_PERMISSION_DENIED="Permission denied for file: %s" +MSG_FILE_READ_ERROR="File read error" +MSG_FILE_WRITE_ERROR="File write error" +MSG_DIRECTORY_CREATED="Directory created" +MSG_DIRECTORY_NOT_FOUND="Directory not found" +MSG_FILE_EXISTS="File exists" +MSG_FILE_EXISTS_NOT="File not found" +MSG_FILE_HAS_CONTENT="File has content" +MSG_FILE_IS_READABLE="File is readable" +MSG_FILE_IS_WRITABLE="File is writable" + +# Translation messages +MSG_ERROR_MISSING_LANG_FILE="Error: Language file not found. Using English defaults." +MSG_TRANSLATION_LOADED="Translation loaded" +MSG_TRANSLATION_MISSING="Translation missing" +MSG_TRANSLATION_ERROR="Translation error" +MSG_TRANSLATION_UPDATED="Translation updated" + +# System and directory messages +MSG_BACKUP_DIRECTORY="Backup directory" +MSG_BACKUP_DIRECTORY_EXISTS="Backup directory exists" +MSG_BACKUP_DIRECTORY_NOT_WRITABLE="WARNING: Backup directory is not writable. Check permissions." +MSG_BACKUP_DIRECTORY_WRITABLE="Backup directory is writable" +MSG_CHECKING_DEPENDENCIES="Checking required dependencies" +MSG_COPY_DIRECTORY="Copy directory" +MSG_CREATED_BACKUP_DIRECTORY="Created backup directory" +MSG_DIRECTORY_EXISTS="Directory exists" +MSG_DIRECTORY_PERMISSIONS="Directory permissions" +MSG_ENVIRONMENT_INFO="Environment information" +MSG_EXISTING_CSV_CHECK="Existing CSV file check" +MSG_LOG_DIRECTORY="Log directory" +MSG_MISSING_DEPENDENCIES="Some required dependencies are missing. Please install them before continuing." +MSG_NO_OPTION="No option provided, using default" +MSG_OS_TYPE="OS Type" +MSG_RETRIEVING_INFO="Retrieving information" +MSG_SCRIPT_DIRECTORY="Script directory" +MSG_WORKING_DIRECTORY="Working directory" + +# API and token messages +MSG_API_KEY_CHECK="API key check" +MSG_API_KEY_FOUND="API key found" +MSG_API_KEY_NOT_FOUND="API key not found" +MSG_API_SECRET_CHECK="API secret check" +MSG_API_SECRET_FOUND="API secret found" +MSG_API_SECRET_NOT_FOUND="API secret not found" +MSG_ACCESS_TOKEN_CHECK="Access token check" +MSG_ACCESS_TOKEN_FOUND="Access token found" +MSG_ACCESS_TOKEN_NOT_FOUND="Access token not found" +MSG_REFRESH_TOKEN_CHECK="Refresh token check" +MSG_REFRESH_TOKEN_FOUND="Refresh token found" +MSG_REFRESH_TOKEN_NOT_FOUND="Refresh token not found" + +# Deprecated keys (kept for backward compatibility) +# These will be removed in future versions +MSG_all_dependencies_installed="All required dependencies are installed." 
\ No newline at end of file diff --git a/locales/es/LC_MESSAGES/messages.sh b/locales/es/LC_MESSAGES/messages.sh new file mode 100755 index 0000000..455d1ba --- /dev/null +++ b/locales/es/LC_MESSAGES/messages.sh @@ -0,0 +1,138 @@ +#!/bin/bash +# +# Language: es +# + +# Define messages for es +# Variables must start with MSG_ to be recognized by the system + +MSG_CONFIG_CREATED="Archivo de configuración creado" +MSG_CONFIG_ERROR="Error en el archivo de configuración" +MSG_CONFIG_LOADED="Configuración cargada" +MSG_CONFIG_NOT_FOUND="Archivo de configuración no encontrado" +MSG_CONFIG_SAVED="Configuración guardada" +MSG_ERROR="Error" +MSG_ERROR_MISSING_LANG_FILE="Error: Archivo de idioma no encontrado. Usando valores predeterminados en inglés." +MSG_EXPORT_FAILED="Proceso de exportación fallido" +MSG_EXPORT_FILE_CREATED="Archivo de exportación creado: %s" +MSG_EXPORT_FINISHED="Proceso de exportación completado" +MSG_EXPORT_NO_DATA="No hay datos para exportar" +MSG_EXPORT_STARTING="Iniciando proceso de exportación" +MSG_FAILED="Fallido" +MSG_FILE_CREATED="Archivo creado: %s" +MSG_FILE_DELETED="Archivo eliminado: %s" +MSG_FILE_NOT_FOUND="Archivo no encontrado: %s" +MSG_FILE_PERMISSION_DENIED="Permiso denegado para el archivo: %s" +MSG_FILE_UPDATED="Archivo actualizado: %s" +MSG_GOODBYE="Adiós" +MSG_HELLO="Hola" +MSG_INFO="Información" +MSG_SCRIPT_ERROR="Se produjo un error durante la ejecución del script" +MSG_SCRIPT_FINISHED="Script Export Trakt 4 Letterboxd finalizado" +MSG_SCRIPT_INTERRUPTED="Script interrumpido por el usuario" +MSG_SCRIPT_STARTING="Iniciando script Export Trakt 4 Letterboxd" +MSG_SUCCESS="Éxito" +MSG_TRAKT_API_ERROR="Error al conectar con la API de Trakt" +MSG_TRAKT_API_RATE_LIMIT="Límite de tasa de la API de Trakt alcanzado, esperando..." +MSG_TRAKT_AUTH_FAILED="Autenticación de Trakt fallida" +MSG_TRAKT_AUTH_REQUIRED="Se requiere autenticación de Trakt" +MSG_TRAKT_AUTH_SUCCESS="Autenticación de Trakt exitosa" +MSG_USER_CONFIRM="¿Desea continuar? 
(s/N)" +MSG_USER_INPUT_REQUIRED="Por favor, proporcione una entrada" +MSG_USER_INVALID_INPUT="Entrada inválida, por favor intente de nuevo" +MSG_WARNING="Advertencia" +MSG_WELCOME="Bienvenido a Export Trakt 4 Letterboxd" +MSG_DONE="Hecho" +MSG_ABORT="Abortar" +MSG_CONTINUE="Continuar" +MSG_YES="Sí" +MSG_NO="No" +MSG_CONFIRM="Confirmar" +MSG_CANCEL="Cancelar" +MSG_EXIT="Salir" +MSG_HELP="Ayuda" +MSG_INVALID_OPTION="Opción inválida" +MSG_PROCESSING="Procesando" +MSG_PLEASE_WAIT="Por favor espere" +MSG_SCRIPT_EXECUTION_START="Ejecución del script iniciada" +MSG_SCRIPT_EXECUTION_END="Ejecución del script finalizada" +MSG_SCRIPT_OPTION="Opción del script" +MSG_NONE="ninguna" +MSG_STARTING="Iniciando" +MSG_RUNNING_IN="ejecutándose en" +MSG_LANGUAGE_SET="Idioma establecido a" +MSG_AUTO_DETECTED="auto-detectado" +MSG_RUNNING_DOCKER="Ejecutando en contenedor Docker" +MSG_API_REQUEST="Solicitud API" +MSG_API_RESPONSE="Respuesta API" +MSG_API_ERROR="Error API" +MSG_API_RETRY="Reintento" +MSG_API_LIMIT="Límite API alcanzado" +MSG_API_WAIT="Esperando antes de la próxima solicitud" +MSG_API_AUTH_REQUIRED="Autenticación requerida" +MSG_API_AUTH_SUCCESS="Autenticación exitosa" +MSG_API_AUTH_FAILURE="Autenticación fallida" +MSG_EXPORT_START="Iniciando exportación" +MSG_EXPORT_COMPLETE="Exportación completada" +MSG_EXPORT_PROCESSING="Procesando datos de exportación" +MSG_EXPORT_FORMATTING="Formateando datos de exportación" +MSG_EXPORT_GENERATING="Generando archivo de exportación" +MSG_EXPORT_SAVING="Guardando archivo de exportación" +MSG_EXPORT_SUMMARY="Resumen de exportación" +MSG_USER_INPUT="Entrada del usuario" +MSG_USER_SELECTION="Selección del usuario" +MSG_USER_CONFIRMATION="Confirmación del usuario" +MSG_USER_PROMPT="Solicitud al usuario" +MSG_CONFIG_LOADING="Cargando configuración" +MSG_CONFIG_SAVING="Guardando configuración" +MSG_CONFIG_MISSING="Configuración faltante" +MSG_CONFIG_UPDATED="Configuración actualizada" +MSG_CONFIG_DEFAULT="Configuración predeterminada" +MSG_FILE_READ_ERROR="Error de lectura de archivo" +MSG_FILE_WRITE_ERROR="Error de escritura de archivo" +MSG_DIRECTORY_CREATED="Directorio creado" +MSG_DIRECTORY_NOT_FOUND="Directorio no encontrado" +MSG_TRANSLATION_LOADED="Traducción cargada" +MSG_TRANSLATION_MISSING="Traducción faltante" +MSG_TRANSLATION_ERROR="Error de traducción" +MSG_TRANSLATION_UPDATED="Traducción actualizada" + +# Legacy keys for backward compatibility +MSG_all_dependencies_installed="Todas las dependencias requeridas están instaladas." +MSG_api_key_check="Comprobación de clave API" +MSG_api_key_found="Clave API encontrada" +MSG_api_key_not_found="Clave API no encontrada" +MSG_api_secret_check="Comprobación de secreto API" +MSG_api_secret_found="Secreto API encontrado" +MSG_api_secret_not_found="Secreto API no encontrado" +MSG_access_token_check="Comprobación de token de acceso" +MSG_access_token_found="Token de acceso encontrado" +MSG_access_token_not_found="Token de acceso no encontrado" +MSG_refresh_token_check="Comprobación de token de actualización" +MSG_refresh_token_found="Token de actualización encontrado" +MSG_refresh_token_not_found="Token de actualización no encontrado" +MSG_backup_directory="Directorio de respaldo" +MSG_backup_directory_exists="El directorio de respaldo existe" +MSG_backup_directory_not_writable="ADVERTENCIA: El directorio de respaldo no es escribible. Compruebe los permisos." 
+MSG_backup_directory_writable="El directorio de respaldo es escribible" +MSG_checking_dependencies="Comprobando dependencias requeridas" +MSG_copy_directory="Directorio de copia" +MSG_created_backup_directory="Directorio de respaldo creado" +MSG_directory_exists="El directorio existe" +MSG_directory_permissions="Permisos de directorio" +MSG_environment_info="Información del entorno" +MSG_existing_csv_check="Comprobación de archivo CSV existente" +MSG_file_exists="El archivo existe" +MSG_file_exists_not="Archivo no encontrado" +MSG_file_has_content="El archivo tiene contenido" +MSG_file_is_readable="El archivo es legible" +MSG_file_is_writable="El archivo es escribible" +MSG_log_directory="Directorio de registros" +MSG_missing_dependencies="Faltan algunas dependencias requeridas. Por favor, instálelas antes de continuar." +MSG_no_option="No se proporcionó ninguna opción, usando el valor predeterminado" +MSG_os_type="Tipo de SO" +MSG_retrieving_info="Recuperando información" +MSG_script_complete="Ejecución del script completada con éxito" +MSG_script_directory="Directorio del script" +MSG_user="Usuario" +MSG_working_directory="Directorio de trabajo" diff --git a/locales/fr.json b/locales/fr.json deleted file mode 100644 index 484a177..0000000 --- a/locales/fr.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "app": { - "name": "Export Trakt 4 Letterboxd", - "description": "Exportez votre historique Trakt.tv au format Letterboxd" - }, - "startup": { - "loading_config": "Chargement de la configuration depuis {{.path}}", - "starting": "Démarrage de Export Trakt 4 Letterboxd", - "config_loaded": "Configuration chargée avec succès" - }, - "export": { - "retrieving_movies": "Récupération des films depuis Trakt.tv", - "movies_retrieved": "{{.count}} films récupérés depuis Trakt.tv", - "exporting_movies": "Exportation des films au format Letterboxd", - "export_complete": "{{.count}} films exportés avec succès vers {{.path}}", - "export_failed": "Échec de l'exportation des films : {{.error}}", - "retrieving_watched_movies": "Récupération des films vus depuis Trakt.tv", - "exporting_watched_movies": "Exportation des films vus au format Letterboxd", - "retrieving_collection": "Récupération des films de votre collection depuis Trakt.tv", - "collection_retrieved": "{{.count}} films récupérés depuis votre collection Trakt.tv", - "exporting_collection": "Exportation des films de votre collection au format Letterboxd", - "collection_export_complete": "{{.count}} films de collection exportés avec succès vers {{.path}}", - "retrieving_watched_shows": "Récupération des séries TV vues depuis Trakt.tv", - "shows_retrieved": "{{.shows}} séries avec {{.episodes}} épisodes récupérés depuis Trakt.tv", - "exporting_shows": "Exportation des séries TV au format CSV", - "shows_export_complete": "{{.shows}} séries avec {{.episodes}} épisodes exportés avec succès vers {{.path}}", - "retrieving_ratings": "Récupération des évaluations de films depuis Trakt.tv", - "ratings_retrieved": "{{.count}} évaluations de films récupérées depuis Trakt.tv", - "exporting_ratings": "Exportation des évaluations de films au format Letterboxd", - "ratings_export_complete": "{{.count}} évaluations de films exportées avec succès vers {{.path}}", - "retrieving_watchlist": "Récupération de la liste de surveillance depuis Trakt.tv", - "watchlist_retrieved": "{{.count}} films récupérés depuis votre liste de surveillance Trakt.tv", - "exporting_watchlist": "Exportation de la liste de surveillance au format Letterboxd", - "watchlist_export_complete": 
"{{.count}} films de la liste de surveillance exportés avec succès vers {{.path}}" - }, - "errors": { - "config_load_failed": "Échec du chargement de la configuration : {{.error}}", - "api_request_failed": "Échec de la requête API : {{.error}}", - "export_dir_create_failed": "Échec de la création du répertoire d'exportation : {{.error}}", - "file_create_failed": "Échec de la création du fichier : {{.error}}", - "log_file_failed": "Échec de la définition du fichier journal : {{.error}}", - "translator_failed": "Échec de l'initialisation du traducteur : {{.error}}", - "invalid_export_type": "Type d'exportation invalide : {{.type}}", - "api_url_parse_error": "Échec de l'analyse de l'URL de l'API : {{.error}}", - "api_response_parse_failed": "Échec de l'analyse de la réponse de l'API : {{.error}}" - } -} diff --git a/locales/fr/LC_MESSAGES/messages.sh b/locales/fr/LC_MESSAGES/messages.sh new file mode 100755 index 0000000..c63f4b2 --- /dev/null +++ b/locales/fr/LC_MESSAGES/messages.sh @@ -0,0 +1,170 @@ +#!/bin/bash +# +# Language: fr +# + +# Define messages for fr +# Variables must start with MSG_ to be recognized by the system + +# General messages +MSG_HELLO="Bonjour" +MSG_WELCOME="Bienvenue dans Export_Trakt_4_Letterboxd" +MSG_GOODBYE="Au revoir" +MSG_ERROR="ERREUR" +MSG_WARNING="AVERTISSEMENT" +MSG_INFO="INFO" +MSG_SUCCESS="SUCCÈS" +MSG_FAILED="Échec" +MSG_DONE="TERMINÉ" +MSG_ABORT="ABANDON" +MSG_CONTINUE="CONTINUER" +MSG_YES="OUI" +MSG_NO="NON" +MSG_CONFIRM="CONFIRMER" +MSG_CANCEL="ANNULER" +MSG_EXIT="QUITTER" +MSG_HELP="AIDE" +MSG_INVALID_OPTION="Option invalide" +MSG_PROCESSING="Traitement en cours" +MSG_PLEASE_WAIT="Veuillez patienter" + +# Script messages +MSG_SCRIPT_STARTING="Démarrage du script Export Trakt 4 Letterboxd" +MSG_SCRIPT_FINISHED="Script Export Trakt 4 Letterboxd terminé" +MSG_SCRIPT_INTERRUPTED="Script interrompu par l'utilisateur" +MSG_SCRIPT_ERROR="Une erreur s'est produite lors de l'exécution du script" +MSG_SCRIPT_EXECUTION_START="Démarrage de l'exécution du script" +MSG_SCRIPT_EXECUTION_END="Fin de l'exécution du script" +MSG_SCRIPT_OPTION="Option du script" +MSG_NONE="aucune" +MSG_STARTING="Démarrage en cours" +MSG_RUNNING_IN="exécuté sur" +MSG_LANGUAGE_SET="Langue définie" +MSG_AUTO_DETECTED="détection automatique" +MSG_RUNNING_DOCKER="Exécution dans un conteneur Docker" + +# Trakt API messages +MSG_TRAKT_AUTH_REQUIRED="Authentification Trakt requise" +MSG_TRAKT_AUTH_SUCCESS="Authentification Trakt réussie" +MSG_TRAKT_AUTH_FAILED="Échec de l'authentification Trakt" +MSG_TRAKT_API_ERROR="Erreur de connexion à l'API Trakt" +MSG_TRAKT_API_RATE_LIMIT="Limite de débit de l'API Trakt atteinte, en attente..." 
+MSG_API_REQUEST="Requête API" +MSG_API_RESPONSE="Réponse API" +MSG_API_ERROR="Erreur API" +MSG_API_RETRY="Nouvelle tentative" +MSG_API_LIMIT="Limite d'API atteinte" +MSG_API_WAIT="Attente avant la prochaine requête" +MSG_API_AUTH_REQUIRED="Authentification requise" +MSG_API_AUTH_SUCCESS="Authentification réussie" +MSG_API_AUTH_FAILURE="Échec d'authentification" + +# Export messages +MSG_EXPORT_STARTING="Démarrage du processus d'exportation" +MSG_EXPORT_FINISHED="Processus d'exportation terminé" +MSG_EXPORT_FAILED="Échec du processus d'exportation" +MSG_EXPORT_NO_DATA="Aucune donnée à exporter" +MSG_EXPORT_FILE_CREATED="Fichier d'export créé : %s" +MSG_EXPORT_START="Début de l'exportation" +MSG_EXPORT_COMPLETE="Exportation terminée" +MSG_EXPORT_PROCESSING="Traitement des données d'exportation" +MSG_EXPORT_FORMATTING="Formatage des données d'exportation" +MSG_EXPORT_GENERATING="Génération du fichier d'exportation" +MSG_EXPORT_SAVING="Enregistrement du fichier d'exportation" +MSG_EXPORT_SUMMARY="Résumé de l'exportation" + +# User messages +MSG_USER_INPUT_REQUIRED="Veuillez fournir une entrée" +MSG_USER_CONFIRM="Voulez-vous continuer ? (o/N)" +MSG_USER_INVALID_INPUT="Entrée invalide, veuillez réessayer" +MSG_USER_INPUT="Entrée utilisateur" +MSG_USER_SELECTION="Sélection utilisateur" +MSG_USER_CONFIRMATION="Confirmation utilisateur" +MSG_USER_PROMPT="Invite utilisateur" + +# Configuration messages +MSG_CONFIG_LOADED="Configuration chargée" +MSG_CONFIG_SAVED="Configuration enregistrée" +MSG_CONFIG_ERROR="Erreur dans le fichier de configuration" +MSG_CONFIG_NOT_FOUND="Fichier de configuration introuvable" +MSG_CONFIG_CREATED="Fichier de configuration créé" +MSG_CONFIG_LOADING="Chargement de la configuration" +MSG_CONFIG_SAVING="Enregistrement de la configuration" +MSG_CONFIG_MISSING="Configuration manquante" +MSG_CONFIG_UPDATED="Configuration mise à jour" +MSG_CONFIG_DEFAULT="Configuration par défaut" + +# File operation messages +MSG_FILE_NOT_FOUND="Fichier introuvable : %s" +MSG_FILE_CREATED="Fichier créé : %s" +MSG_FILE_DELETED="Fichier supprimé : %s" +MSG_FILE_UPDATED="Fichier mis à jour : %s" +MSG_FILE_PERMISSION_DENIED="Permission refusée pour le fichier : %s" +MSG_FILE_READ_ERROR="Erreur de lecture du fichier" +MSG_FILE_WRITE_ERROR="Erreur d'écriture du fichier" +MSG_DIRECTORY_CREATED="Répertoire créé" +MSG_DIRECTORY_NOT_FOUND="Répertoire non trouvé" + +# Translation messages +MSG_ERROR_MISSING_LANG_FILE="Erreur : Fichier de langue introuvable. Utilisation des valeurs par défaut en anglais." 
+MSG_TRANSLATION_LOADED="Traduction chargée" +MSG_TRANSLATION_MISSING="Traduction manquante" +MSG_TRANSLATION_ERROR="Erreur de traduction" +MSG_TRANSLATION_UPDATED="Traduction mise à jour" + +# Anciennes clés en minuscules (pour compatibilité) +# Seulement les clés qui ne sont pas déjà définies en majuscules +MSG_api_key_check="Vérification de la clé API" +MSG_api_key_found="Clé API trouvée" +MSG_api_key_not_found="Clé API non trouvée" +MSG_api_secret_check="Vérification du secret API" +MSG_api_secret_found="Secret API trouvé" +MSG_api_secret_not_found="Secret API non trouvé" +MSG_access_token_check="Vérification du jeton d'accès" +MSG_access_token_found="Jeton d'accès trouvé" +MSG_access_token_not_found="Jeton d'accès non trouvé" +MSG_refresh_token_check="Vérification du jeton de rafraîchissement" +MSG_refresh_token_found="Jeton de rafraîchissement trouvé" +MSG_refresh_token_not_found="Jeton de rafraîchissement non trouvé" +MSG_environment_info="Informations sur l'environnement" +MSG_existing_csv_check="Vérification du fichier CSV existant" +MSG_no_option="Aucune option fournie, utilisation de la valeur par défaut" +MSG_retrieving_info="Récupération des informations" +MSG_checking_dependencies="Vérification des dépendances requises" +MSG_missing_dependencies="Certaines dépendances requises sont manquantes. Veuillez les installer avant de continuer." +MSG_all_dependencies_installed="Toutes les dépendances requises sont installées." +MSG_user="Utilisateur" +MSG_working_directory="Répertoire de travail" +MSG_script_directory="Répertoire du script" +MSG_copy_directory="Répertoire de copie" +MSG_log_directory="Répertoire des journaux" +MSG_backup_directory="Répertoire de sauvegarde" +MSG_os_type="Type d'OS" +MSG_file_exists="Le fichier existe" +MSG_file_is_readable="Le fichier est lisible" +MSG_file_is_writable="Le fichier est modifiable" +MSG_file_has_content="Le fichier a du contenu" +MSG_file_exists_not="Fichier non trouvé" +MSG_directory_exists="Le répertoire existe" +MSG_directory_permissions="Permissions du répertoire" +MSG_created_backup_directory="Répertoire de sauvegarde créé" +MSG_backup_directory_exists="Le répertoire de sauvegarde existe" +MSG_backup_directory_writable="Le répertoire de sauvegarde est modifiable" +MSG_backup_directory_not_writable="AVERTISSEMENT: Le répertoire de sauvegarde n'est pas modifiable. Vérifiez les permissions." 
+ +# Ajout des clés manquantes en minuscules +MSG_welcome="Bienvenue dans Export_Trakt_4_Letterboxd" +MSG_starting="Démarrage en cours" +MSG_script_execution_start="Démarrage de l'exécution du script" +MSG_running_in="exécuté sur" +MSG_script_option="Option du script" +MSG_none="aucune" +MSG_auto_detected="détection automatique" +MSG_language_set="Langue définie sur" +MSG_running_docker="Exécution dans un conteneur Docker" +MSG_error="Erreur" +MSG_warning="Avertissement" +MSG_info="Information" +MSG_success="Succès" +MSG_script_complete="Exécution du script terminée avec succès" +MSG_processing_option="Traitement de l'option" diff --git a/locales/it/LC_MESSAGES/messages.sh b/locales/it/LC_MESSAGES/messages.sh new file mode 100755 index 0000000..4f0f49e --- /dev/null +++ b/locales/it/LC_MESSAGES/messages.sh @@ -0,0 +1,156 @@ +#!/bin/bash +# +# Language: it +# + +# Define messages for it +# Variables must start with MSG_ to be recognized by the system + +# Messaggi generali +MSG_HELLO="Ciao" +MSG_WELCOME="Benvenuto in Export Trakt 4 Letterboxd" +MSG_GOODBYE="Arrivederci" +MSG_ERROR="ERRORE" +MSG_WARNING="AVVISO" +MSG_INFO="INFO" +MSG_SUCCESS="SUCCESSO" +MSG_FAILED="Fallito" +MSG_DONE="COMPLETATO" +MSG_ABORT="INTERROTTO" +MSG_CONTINUE="CONTINUA" +MSG_YES="SÌ" +MSG_NO="NO" +MSG_CONFIRM="CONFERMA" +MSG_CANCEL="ANNULLA" +MSG_EXIT="ESCI" +MSG_HELP="AIUTO" +MSG_INVALID_OPTION="Opzione non valida" +MSG_PROCESSING="Elaborazione in corso" +MSG_PLEASE_WAIT="Attendere prego" + +# Messaggi dello script +MSG_SCRIPT_STARTING="Avvio dello script Export Trakt 4 Letterboxd" +MSG_SCRIPT_FINISHED="Script Export Trakt 4 Letterboxd terminato" +MSG_SCRIPT_INTERRUPTED="Script interrotto dall'utente" +MSG_SCRIPT_ERROR="Si è verificato un errore durante l'esecuzione dello script" +MSG_SCRIPT_EXECUTION_START="Inizio dell'esecuzione dello script" +MSG_SCRIPT_EXECUTION_END="Fine dell'esecuzione dello script" +MSG_SCRIPT_OPTION="Opzione dello script" +MSG_NONE="nessuna" +MSG_STARTING="Avvio in corso" +MSG_RUNNING_IN="in esecuzione su" +MSG_LANGUAGE_SET="Lingua impostata" +MSG_AUTO_DETECTED="rilevamento automatico" +MSG_RUNNING_DOCKER="In esecuzione in un contenitore Docker" + +# Messaggi API Trakt +MSG_TRAKT_AUTH_REQUIRED="Autenticazione Trakt richiesta" +MSG_TRAKT_AUTH_SUCCESS="Autenticazione Trakt riuscita" +MSG_TRAKT_AUTH_FAILED="Autenticazione Trakt fallita" +MSG_TRAKT_API_ERROR="Errore di connessione all'API Trakt" +MSG_TRAKT_API_RATE_LIMIT="Limite di velocità dell'API Trakt raggiunto, in attesa..." 
+MSG_API_REQUEST="Richiesta API" +MSG_API_RESPONSE="Risposta API" +MSG_API_ERROR="Errore API" +MSG_API_RETRY="Nuovo tentativo" +MSG_API_LIMIT="Limite API raggiunto" +MSG_API_WAIT="Attesa prima della prossima richiesta" +MSG_API_AUTH_REQUIRED="Autenticazione richiesta" +MSG_API_AUTH_SUCCESS="Autenticazione riuscita" +MSG_API_AUTH_FAILURE="Autenticazione fallita" + +# Messaggi di esportazione +MSG_EXPORT_STARTING="Avvio del processo di esportazione" +MSG_EXPORT_FINISHED="Processo di esportazione completato" +MSG_EXPORT_FAILED="Processo di esportazione fallito" +MSG_EXPORT_NO_DATA="Nessun dato da esportare" +MSG_EXPORT_FILE_CREATED="File di esportazione creato: %s" +MSG_EXPORT_START="Inizio dell'esportazione" +MSG_EXPORT_COMPLETE="Esportazione completata" +MSG_EXPORT_PROCESSING="Elaborazione dei dati di esportazione" +MSG_EXPORT_FORMATTING="Formattazione dei dati di esportazione" +MSG_EXPORT_GENERATING="Generazione del file di esportazione" +MSG_EXPORT_SAVING="Salvataggio del file di esportazione" +MSG_EXPORT_SUMMARY="Riepilogo dell'esportazione" + +# Messaggi utente +MSG_USER_INPUT_REQUIRED="Si prega di fornire un input" +MSG_USER_CONFIRM="Vuoi continuare? (s/N)" +MSG_USER_INVALID_INPUT="Input non valido, riprova" +MSG_USER_INPUT="Input utente" +MSG_USER_SELECTION="Selezione utente" +MSG_USER_CONFIRMATION="Conferma utente" +MSG_USER_PROMPT="Prompt utente" + +# Messaggi di configurazione +MSG_CONFIG_LOADED="Configurazione caricata" +MSG_CONFIG_SAVED="Configurazione salvata" +MSG_CONFIG_ERROR="Errore nel file di configurazione" +MSG_CONFIG_NOT_FOUND="File di configurazione non trovato" +MSG_CONFIG_CREATED="File di configurazione creato" +MSG_CONFIG_LOADING="Caricamento della configurazione" +MSG_CONFIG_SAVING="Salvataggio della configurazione" +MSG_CONFIG_MISSING="Configurazione mancante" +MSG_CONFIG_UPDATED="Configurazione aggiornata" +MSG_CONFIG_DEFAULT="Configurazione predefinita" + +# Messaggi operazioni file +MSG_FILE_NOT_FOUND="File non trovato: %s" +MSG_FILE_CREATED="File creato: %s" +MSG_FILE_DELETED="File eliminato: %s" +MSG_FILE_UPDATED="File aggiornato: %s" +MSG_FILE_PERMISSION_DENIED="Permesso negato per il file: %s" +MSG_FILE_READ_ERROR="Errore di lettura del file" +MSG_FILE_WRITE_ERROR="Errore di scrittura del file" +MSG_DIRECTORY_CREATED="Directory creata" +MSG_DIRECTORY_NOT_FOUND="Directory non trovata" + +# Messaggi di traduzione +MSG_ERROR_MISSING_LANG_FILE="Errore: File di lingua non trovato. Utilizzo dei valori predefiniti in inglese." +MSG_TRANSLATION_LOADED="Traduzione caricata" +MSG_TRANSLATION_MISSING="Traduzione mancante" +MSG_TRANSLATION_ERROR="Errore di traduzione" +MSG_TRANSLATION_UPDATED="Traduzione aggiornata" + +# Versioni in minuscolo per compatibilità +MSG_welcome="Benvenuto in Export Trakt 4 Letterboxd" +MSG_starting="Avvio in corso" +MSG_script_execution_start="Inizio dell'esecuzione dello script" +MSG_running_in="in esecuzione su" +MSG_script_option="Opzione dello script" +MSG_none="nessuna" +MSG_auto_detected="rilevamento automatico" +MSG_language_set="Lingua impostata a" +MSG_running_docker="In esecuzione in un contenitore Docker" +MSG_error="Errore" +MSG_warning="Avviso" +MSG_info="Informazione" +MSG_success="Successo" +MSG_script_complete="Esecuzione dello script completata con successo" +MSG_processing_option="Elaborazione dell'opzione" + +MSG_all_dependencies_installed="All required dependencies are installed." 
+MSG_backup_directory="Backup directory" +MSG_backup_directory_exists="Backup directory exists" +MSG_backup_directory_not_writable="WARNING: Backup directory is not writable. Check permissions." +MSG_backup_directory_writable="Backup directory is writable" +MSG_checking_dependencies="Checking required dependencies" +MSG_copy_directory="Copy directory" +MSG_created_backup_directory="Created backup directory" +MSG_directory_exists="Directory exists" +MSG_directory_permissions="Directory permissions" +MSG_environment_info="Environment information" +MSG_existing_csv_check="Existing CSV file check" +MSG_file_exists="File exists" +MSG_file_exists_not="File not found" +MSG_file_has_content="File has content" +MSG_file_is_readable="File is readable" +MSG_file_is_writable="File is writable" +MSG_log_directory="Log directory" +MSG_missing_dependencies="Some required dependencies are missing. Please install them before continuing." +MSG_no_option="No option provided, using default" +MSG_os_type="OS Type" +MSG_retrieving_info="Retrieving information" +MSG_script_directory="Script directory" +MSG_user="User" +MSG_working_directory="Working directory" diff --git a/logs/.gitkeep b/logs/.gitkeep deleted file mode 100755 index 55aeb16..0000000 --- a/logs/.gitkeep +++ /dev/null @@ -1,2 +0,0 @@ -# This file keeps the logs directory in git while ignoring all log files -# Log files are generated at runtime and should not be committed \ No newline at end of file diff --git a/logs/README.md b/logs/README.md deleted file mode 100755 index 0d276f7..0000000 --- a/logs/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# Logs Directory - -This directory contains application log files generated during runtime. - -## Log Files - -The application generates several types of log files: - -- `export.log` - Main application log -- `app.log` - General application events -- `cron.log` - Scheduled export logs -- `Export_Trakt_4_Letterboxd_YYYY-MM-DD_HH-MM-SS.log` - Timestamped execution logs - -## Log Levels - -The application supports multiple log levels: - -- `ERROR` - Error messages -- `WARN` - Warning messages -- `INFO` - Informational messages -- `DEBUG` - Debug messages (verbose mode) - -## Configuration - -Log level can be configured in the `config.toml` file: - -```toml -[logging] -level = "info" -``` - -## Note - -Log files are automatically ignored by git as they contain runtime information and can become large over time. 
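The shell locale files added above all follow the same contract: each `locales/<lang>/LC_MESSAGES/messages.sh` defines plain `MSG_*` variables, and a consumer picks a language, sources the file, and fills any `%s` placeholders with `printf`. The sketch below illustrates that pattern; the `load_language` helper, the `LANG_DIR`/`DEFAULT_LANG` variables, and the English fallback are illustrative assumptions, not code taken from this repository.

```bash
#!/bin/bash
# Illustrative sketch of consuming the MSG_* locale files; only the variable
# convention (MSG_ prefix, %s placeholders) comes from the files in this diff.

LANG_DIR="./locales"    # assumed layout: locales/<lang>/LC_MESSAGES/messages.sh
DEFAULT_LANG="en"

load_language() {
  local lang="${1:-$DEFAULT_LANG}"
  local file="${LANG_DIR}/${lang}/LC_MESSAGES/messages.sh"
  # Fall back to the default language when the requested file is missing
  [ -f "$file" ] || file="${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh"
  # shellcheck source=/dev/null
  source "$file"
}

load_language "fr"
echo "$MSG_WELCOME"                            # Bienvenue dans Export_Trakt_4_Letterboxd
printf "${MSG_FILE_CREATED}\n" "watched.csv"   # Fichier créé : watched.csv
```

The `manage_translations.sh` utility introduced in the next hunk relies on the same convention: it sources each `messages.sh`, enumerates the `MSG_*` variables with `set`, and compares them against the English reference file.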
diff --git a/manage_translations.sh b/manage_translations.sh new file mode 100755 index 0000000..1bba9be --- /dev/null +++ b/manage_translations.sh @@ -0,0 +1,1045 @@ +#!/bin/bash +# +# Translation Management Utility +# This script helps manage language files for Export Trakt 4 Letterboxd +# + +# Enable error handling +set -o pipefail + +# We use PWD to get the current working directory +SCRIPT_DIR="$(pwd)" +LANG_DIR="${SCRIPT_DIR}/locales" +DEFAULT_LANG="en" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Debug flag (set to true to enable debug output) +DEBUG=true + +# Debug information function +debug_log() { + if [ "$DEBUG" = true ]; then + echo -e "${BLUE}DEBUG:${NC} $1" + fi +} + +# Error function +error_log() { + echo -e "${RED}ERROR:${NC} $1" >&2 +} + +# Success function +success_log() { + echo -e "${GREEN}SUCCESS:${NC} $1" +} + +# Warning function +warning_log() { + echo -e "${YELLOW}WARNING:${NC} $1" +} + +# Initialize environment +initialize() { + debug_log "Working directory: ${SCRIPT_DIR}" + debug_log "Language directory: ${LANG_DIR}" + + if [ ! -d "$LANG_DIR" ]; then + mkdir -p "$LANG_DIR" + success_log "Created language directory: $LANG_DIR" + fi + + # Check for default language + if [ ! -d "${LANG_DIR}/${DEFAULT_LANG}" ]; then + warning_log "Default language directory (${DEFAULT_LANG}) not found" + fi + + # Add debug information + debug_log "Looking for language files in: $LANG_DIR" + if [ "$DEBUG" = true ]; then + find "$LANG_DIR" -type f -name "messages.sh" -print + fi +} + +# Function to display help +show_help() { + cat << EOF +Translation Management Utility +------------------------------ +Usage: $0 [command] [options] + +Commands: + list List available languages + create Create a new language template + update Update all language files with new strings from default language + status Show translation status for all languages + check Validate translation files for errors + export Export translations to a specified format (json, po) + import Import translations from external file + help Display this help message + +Examples: + $0 list + $0 create de + $0 update + $0 status + $0 check + $0 export json + $0 import translations.po + +Language codes should be 2-letter ISO language codes (e.g., 'en', 'fr', 'es', 'de'). +EOF +} + +# Initialize the environment +initialize + +# Function to load default language messages (as the base for all translations) +load_default_messages() { + local default_file="${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" + debug_log "Trying to load default language (${DEFAULT_LANG}) messages from: $default_file" + debug_log "File exists? 
$([ -f "$default_file" ] && echo "Yes" || echo "No")" + + if [ -f "$default_file" ]; then + if [ "$DEBUG" = true ]; then + debug_log "File content preview:" + head -n 20 "$default_file" + fi + + # Backup any existing MSG_ variables to avoid conflicts + local old_msg_vars=$(set | grep '^MSG_' | cut -d= -f1) + for var in $old_msg_vars; do + unset "$var" + done + + source "$default_file" + success_log "Loaded ${DEFAULT_LANG} messages as reference" + + # Get information about loaded messages + debug_log "Example message values:" + debug_log "Value of MSG_WELCOME: ${MSG_WELCOME}" + debug_log "Value of MSG_ERROR: ${MSG_ERROR}" + + # Count MSG_ variables using a different approach + msg_count=$(set | grep '^MSG_' | wc -l) + debug_log "Found $msg_count message variables" + + return 0 + else + error_log "Default language file not found at: $default_file" + error_log "Please make sure the default language file exists." + return 1 + fi +} + +# Function to list available languages with enhanced output +list_languages() { + local count=0 + echo "Available languages:" + echo "------------------" + + if [ "$DEBUG" = true ]; then + debug_log "Searching in subdirectories of $LANG_DIR" + ls -la "$LANG_DIR" + fi + + for lang_dir in "${LANG_DIR}"/*; do + if [ "$DEBUG" = true ]; then + debug_log "Checking directory: $lang_dir" + fi + + if [ -d "$lang_dir" ]; then + message_file="${lang_dir}/LC_MESSAGES/messages.sh" + + if [ "$DEBUG" = true ]; then + debug_log "Looking for message file: $message_file (exists: $([ -f "$message_file" ] && echo "Yes" || echo "No"))" + fi + + if [ -f "${message_file}" ]; then + lang_code=$(basename "$lang_dir") + # Get language name if possible + lang_name=$(grep '# Language:' "$message_file" | sed 's/# Language: //' | head -1) + + if [ -z "$lang_name" ]; then + lang_name="$lang_code" + fi + + # Check if this is the default language + if [ "$lang_code" = "$DEFAULT_LANG" ]; then + echo -e " - ${GREEN}$lang_code${NC} ($lang_name) [DEFAULT]" + else + echo " - $lang_code ($lang_name)" + fi + count=$((count + 1)) + fi + fi + done + + if [ $count -eq 0 ]; then + warning_log "No language files found." + else + echo "" + success_log "Total: $count language(s)" + fi +} + +# Function to create a new language template with better formatting +create_language() { + local lang="$1" + + # Check if language code is valid + if [ -z "$lang" ] || [ ${#lang} -ne 2 ]; then + error_log "Invalid language code. Please use a 2-letter ISO language code (e.g., 'en', 'fr')." + return 1 + fi + + # Check if the language already exists + if [ -f "${LANG_DIR}/${lang}/LC_MESSAGES/messages.sh" ]; then + warning_log "Language '$lang' already exists." + read -p "Do you want to overwrite it? (y/N): " confirm + if [ "$confirm" != "y" ] && [ "$confirm" != "Y" ]; then + warning_log "Operation cancelled by user." + return 1 + fi + fi + + # Load default language messages as reference + if ! load_default_messages; then + return 1 + fi + + # Create directory if it doesn't exist + local lang_dir="${LANG_DIR}/${lang}/LC_MESSAGES" + if [ ! 
-d "$lang_dir" ]; then + mkdir -p "$lang_dir" + success_log "Created directory: $lang_dir" + fi + + # Create template file + local template_file="${lang_dir}/messages.sh" + + # Get a list of all message variables + # Use set command to get all variables and filter for MSG_ prefix + local msg_vars=$(set | grep '^MSG_' | cut -d= -f1 | sort) + + # Get categories from the default language file + local categories=$(grep "^# " "${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" | grep -v "^# Language:") + + # Create template content + { + echo "#!/bin/bash" + echo "#" + echo "# Language: $lang" + echo "#" + echo "" + echo "# Define messages for $lang" + echo "# Variables must start with MSG_ to be recognized by the system" + echo "" + + # Get the current content of the default language file to preserve categories and comments + local current_section="" + while IFS= read -r line; do + # If this is a category comment, add it to the template + if [[ $line =~ ^#[[:space:]]+(.*)[[:space:]]*$ && ! "$line" =~ ^#[[:space:]]+Language: ]]; then + current_section=$(echo "$line" | sed 's/^#[[:space:]]*//g') + echo "$line" + # If this is a variable definition, add it with its English value + elif [[ $line =~ ^MSG_.*= ]]; then + var_name=$(echo "$line" | cut -d= -f1) + eng_value=${!var_name} + echo "$var_name=\"$eng_value\"" + # If this is an empty line, preserve it for readability + elif [[ -z "$line" ]]; then + echo "" + fi + done < "${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" + } > "$template_file" + + # Make file executable + chmod +x "$template_file" + + success_log "Created language template: $template_file" + echo "Please edit this file to add your translations." +} + +# Function to update all language files with new strings from default language +update_languages() { + # Load default language messages as reference + if ! load_default_messages; then + return 1 + fi + + echo "Updating language files..." + + # Get default language message keys and values + default_keys=() + default_values=() + + for var in $(set | grep '^MSG_' | cut -d= -f1 | sort); do + default_keys+=("$var") + default_values+=("${!var}") + done + + # Get categories from the default language file for proper formatting + local categories=$(grep "^# " "${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" | grep -v "^# Language:") + + # Count total languages updated + local updated_count=0 + local skipped_count=0 + + # Update each language file + for lang_dir in "${LANG_DIR}"/*; do + if [ -d "$lang_dir" ] && [ "$lang_dir" != "${LANG_DIR}/${DEFAULT_LANG}" ]; then + lang_code=$(basename "$lang_dir") + lang_file="${lang_dir}/LC_MESSAGES/messages.sh" + + if [ -f "$lang_file" ]; then + echo "Updating $lang_code language file..." 
+ + # Back up the original file + cp "$lang_file" "${lang_file}.bak" + success_log "Created backup: ${lang_file}.bak" + + # Clear previous message variables + for var in $(set | grep '^MSG_' | cut -d= -f1); do + unset "$var" + done + + # Load current translations + source "$lang_file" + + # Create new file content with proper formatting + local temp_file="${lang_file}.new" + + # Get the current content of the default language file to preserve categories and comments + { + echo "#!/bin/bash" + echo "#" + echo "# Language: $lang_code" + echo "#" + echo "" + echo "# Define messages for $lang_code" + echo "# Variables must start with MSG_ to be recognized by the system" + echo "" + + local current_section="" + while IFS= read -r line; do + # If this is a category comment, add it to the template + if [[ $line =~ ^#[[:space:]]+(.*)[[:space:]]*$ && ! "$line" =~ ^#[[:space:]]+Language: ]]; then + current_section=$(echo "$line" | sed 's/^#[[:space:]]*//g') + echo "$line" + # If this is a variable definition, add it with its translation or English value + elif [[ $line =~ ^(MSG_[A-Za-z0-9_]+)= ]]; then + var_name="${BASH_REMATCH[1]}" + eng_value=${!var_name} + + if [ -n "${!var_name}" ]; then + # Use existing translation + echo "$var_name=\"${!var_name}\"" + else + # Add new key with English value as comment + echo "$var_name=\"$eng_value\" # TODO: Translate this" + fi + # If this is an empty line, preserve it for readability + elif [[ -z "$line" ]]; then + echo "" + fi + done < "${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" + } > "$temp_file" + + # Replace old file with new one + mv "$temp_file" "$lang_file" + chmod +x "$lang_file" + success_log "Updated $lang_file" + updated_count=$((updated_count + 1)) + else + warning_log "Language file not found for $lang_code, skipping." + skipped_count=$((skipped_count + 1)) + fi + fi + done + + echo "-----------------------------" + echo "Update summary:" + echo " - Languages updated: $updated_count" + echo " - Languages skipped: $skipped_count" + success_log "Language files updated successfully." +} + +# Function to validate translation files for errors +check_translation_files() { + echo "Validating translation files..." + echo "-------------------------------" + + local error_count=0 + local warning_count=0 + + # Check each language file + for lang_dir in "${LANG_DIR}"/*; do + if [ -d "$lang_dir" ]; then + lang_code=$(basename "$lang_dir") + lang_file="${lang_dir}/LC_MESSAGES/messages.sh" + + if [ -f "$lang_file" ]; then + echo "Checking $lang_code language file..." + + # Check if file has execute permission + if [ ! -x "$lang_file" ]; then + warning_log "$lang_file does not have execute permission." + chmod +x "$lang_file" + success_log "Fixed permissions for $lang_file" + warning_count=$((warning_count + 1)) + fi + + # Check for syntax errors in the shell script + bash -n "$lang_file" + if [ $? -ne 0 ]; then + error_log "Syntax error in $lang_file" + error_count=$((error_count + 1)) + continue + fi + + # Check for duplicate keys + local duplicates=$(grep -oE "^MSG_[A-Za-z0-9_]+" "$lang_file" | sort | uniq -d) + if [ -n "$duplicates" ]; then + warning_log "Duplicate keys found in $lang_file:" + echo "$duplicates" + warning_count=$((warning_count + 1)) + fi + + # Check for untranslated strings + local untranslated=$(grep -c "# TODO: Translate this" "$lang_file") + if [ $untranslated -gt 0 ]; then + warning_log "$lang_file has $untranslated untranslated strings." 
+ warning_count=$((warning_count + 1)) + fi + + # Check for missing quotes + local missing_quotes=$(grep -E '^MSG_[A-Za-z0-9_]+=[^"]' "$lang_file" | grep -v '=""') + if [ -n "$missing_quotes" ]; then + error_log "Missing quotes in $lang_file:" + echo "$missing_quotes" + error_count=$((error_count + 1)) + fi + + # Success for this file + if [ $error_count -eq 0 ] && [ $warning_count -eq 0 ]; then + success_log "$lang_file is valid." + fi + else + error_log "Language file not found: $lang_file" + error_count=$((error_count + 1)) + fi + fi + done + + echo "-----------------------------" + echo "Validation summary:" + echo " - Errors: $error_count" + echo " - Warnings: $warning_count" + + if [ $error_count -eq 0 ] && [ $warning_count -eq 0 ]; then + success_log "All language files are valid." + return 0 + elif [ $error_count -eq 0 ]; then + warning_log "Language files have warnings but no errors." + return 0 + else + error_log "Language files have errors that must be fixed." + return 1 + fi +} + +# Function to show translation status with enhanced output +show_status() { + # Load default language messages as reference + if ! load_default_messages; then + return 1 + fi + + # Count total keys using set instead of env + local total_keys=$(set | grep -c '^MSG_') + debug_log "Total message keys found: $total_keys" + + if [ $total_keys -eq 0 ]; then + error_log "No message keys found in default language file." + return 1 + fi + + # Get all keys from default language + local default_keys=$(set | grep '^MSG_' | cut -d= -f1 | sort) + + echo "Translation Status:" + echo "------------------" + + # Track overall statistics + local total_languages=0 + local fully_translated=0 + local partially_translated=0 + local not_translated=0 + + for lang_dir in "${LANG_DIR}"/*; do + if [ -d "$lang_dir" ]; then + lang_code=$(basename "$lang_dir") + lang_file="${lang_dir}/LC_MESSAGES/messages.sh" + + if [ -f "$lang_file" ]; then + debug_log "Analyzing file: $lang_file" + total_languages=$((total_languages + 1)) + + # Count message keys in this language file + local msg_count=$(grep -c "^MSG_.*=" "$lang_file") + local translated_count=$(grep -c "^MSG_.*=" "$lang_file" | grep -v "# TODO: Translate this") + local untranslated_count=$(grep -c "# TODO: Translate this" "$lang_file") + local percentage=0 + + if [ $total_keys -gt 0 ]; then + percentage=$((translated_count * 100 / total_keys)) + + # Colorize output based on percentage + if [ $percentage -eq 100 ]; then + echo -e " - ${GREEN}$lang_code: $translated_count/$total_keys ($percentage%) translated${NC}" + fully_translated=$((fully_translated + 1)) + elif [ $percentage -ge 75 ]; then + echo -e " - ${YELLOW}$lang_code: $translated_count/$total_keys ($percentage%) translated${NC}" + partially_translated=$((partially_translated + 1)) + else + echo -e " - ${RED}$lang_code: $translated_count/$total_keys ($percentage%) translated${NC}" + partially_translated=$((partially_translated + 1)) + fi + + # Vérifier la présence de variables en minuscules et majuscules + local upper_case=$(grep -c "^MSG_[A-Z]" "$lang_file") + local lower_case=$(grep -c "^MSG_[a-z]" "$lang_file") + echo " Upper case keys: $upper_case, Lower case keys: $lower_case" + + # Identifier les doublons éventuels + echo " Checking for duplicate keys..." 
+ local duplicates=$(grep -oE "^MSG_[A-Za-z0-9_]+" "$lang_file" | sort | uniq -d | wc -l) + if [ $duplicates -gt 0 ]; then + warning_log " $duplicates duplicate keys found" + # Afficher les clés dupliquées + grep -oE "^MSG_[A-Za-z0-9_]+" "$lang_file" | sort | uniq -d + fi + + # Show untranslated strings count if any + if [ $untranslated_count -gt 0 ]; then + warning_log " $untranslated_count untranslated strings" + fi + + # Check for extra keys (not in default language) + local extra_keys=$(comm -13 <(echo "$default_keys" | sort) <(grep -oE "^MSG_[A-Za-z0-9_]+" "$lang_file" | sort | uniq)) + if [ -n "$extra_keys" ]; then + warning_log " Extra keys not in default language:" + echo "$extra_keys" | sed 's/^/ - /' + fi + else + error_log " - $lang_code: No message keys found in default language" + not_translated=$((not_translated + 1)) + fi + else + error_log " - $lang_code: No messages file found" + not_translated=$((not_translated + 1)) + fi + fi + done + + echo "" + echo "Status Summary:" + echo " - Total languages: $total_languages" + echo " - Fully translated (100%): $fully_translated" + echo " - Partially translated: $partially_translated" + echo " - Not translated: $not_translated" + + return 0 +} + +# Function to export translations to different formats +export_translations() { + local format="$1" + + if [ -z "$format" ]; then + error_log "Please specify an export format (json, po)" + return 1 + fi + + # Create export directory if it doesn't exist + local export_dir="${SCRIPT_DIR}/exports" + if [ ! -d "$export_dir" ]; then + mkdir -p "$export_dir" + success_log "Created export directory: $export_dir" + fi + + case "$format" in + json) + export_json "$export_dir" + ;; + po) + export_po "$export_dir" + ;; + *) + error_log "Unsupported export format: $format. Use 'json' or 'po'." + return 1 + ;; + esac +} + +# Function to export to JSON format +export_json() { + local export_dir="$1" + + echo "Exporting translations to JSON format..." 
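+    # Output layout: exports/<lang>.json is a flat object keyed by the MSG_ variable
+    # names, and all_translations.json nests those per-language objects under their
+    # two-letter language codes.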
+ + # Get all available languages + local languages=() + for lang_dir in "${LANG_DIR}"/*; do + if [ -d "$lang_dir" ] && [ -f "${lang_dir}/LC_MESSAGES/messages.sh" ]; then + languages+=($(basename "$lang_dir")) + fi + done + + # Create JSON for each language + for lang in "${languages[@]}"; do + local lang_file="${LANG_DIR}/${lang}/LC_MESSAGES/messages.sh" + local json_file="${export_dir}/${lang}.json" + + # Clear any existing MSG_ variables to avoid conflicts + for var in $(set | grep '^MSG_' | cut -d= -f1); do + unset "$var" + done + + # Source the language file to get all definitions + source "$lang_file" + + # Start JSON content + echo "{" > "$json_file" + + # Add each message key to JSON + local msg_vars=$(set | grep '^MSG_' | cut -d= -f1 | sort) + local first=true + for var in $msg_vars; do + local value=${!var} + # Escape quotes in the value + value=$(echo "$value" | sed 's/"/\\"/g') + + if [ "$first" = true ]; then + echo " \"$var\": \"$value\"" >> "$json_file" + first=false + else + echo " ,\"$var\": \"$value\"" >> "$json_file" + fi + done + + # End JSON content + echo "}" >> "$json_file" + + success_log "Created $json_file" + done + + # Create a combined JSON with all languages + local all_json="${export_dir}/all_translations.json" + echo "{" > "$all_json" + + local first_lang=true + for lang in "${languages[@]}"; do + if [ "$first_lang" = true ]; then + echo " \"$lang\": $(cat "${export_dir}/${lang}.json")" >> "$all_json" + first_lang=false + else + echo " ,\"$lang\": $(cat "${export_dir}/${lang}.json")" >> "$all_json" + fi + done + + echo "}" >> "$all_json" + + success_log "Created combined translations file: $all_json" + success_log "Translations successfully exported to JSON format in $export_dir" +} + +# Function to export to PO format (gettext compatible) +export_po() { + local export_dir="$1" + + echo "Exporting translations to PO format..." + + # Make sure default language exists + if [ ! -f "${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" ]; then + error_log "Default language file not found: ${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" + return 1 + fi + + # Load default language as a reference + for var in $(set | grep '^MSG_' | cut -d= -f1); do + unset "$var" + done + source "${LANG_DIR}/${DEFAULT_LANG}/LC_MESSAGES/messages.sh" + + # Get default messages + local default_keys=() + local default_values=() + for var in $(set | grep '^MSG_' | cut -d= -f1 | sort); do + default_keys+=("$var") + default_values+=("${!var}") + done + + # Current date in PO format + local po_date=$(date +"%Y-%m-%d %H:%M%z") + + # Get all available languages except default + for lang_dir in "${LANG_DIR}"/*; do + if [ -d "$lang_dir" ] && [ "$(basename "$lang_dir")" != "$DEFAULT_LANG" ]; then + local lang=$(basename "$lang_dir") + local lang_file="${lang_dir}/LC_MESSAGES/messages.sh" + local po_file="${export_dir}/${lang}.po" + + if [ -f "$lang_file" ]; then + # Clear any existing MSG_ variables to avoid conflicts + for var in $(set | grep '^MSG_' | cut -d= -f1); do + unset "$var" + done + + # Source the language file to get all definitions + source "$lang_file" + + # Create PO header + cat > "$po_file" << EOF +# Translation for Export Trakt 4 Letterboxd +# Copyright (C) $(date +"%Y") Export Trakt 4 Letterboxd +# This file is distributed under the same license as the Export Trakt 4 Letterboxd package. 
+# +msgid "" +msgstr "" +"Project-Id-Version: Export Trakt 4 Letterboxd 1.0\\n" +"Report-Msgid-Bugs-To: \\n" +"POT-Creation-Date: $po_date\\n" +"PO-Revision-Date: $po_date\\n" +"Last-Translator: Automatic export\\n" +"Language-Team: $lang\\n" +"Language: $lang\\n" +"MIME-Version: 1.0\\n" +"Content-Type: text/plain; charset=UTF-8\\n" +"Content-Transfer-Encoding: 8bit\\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\\n" + +EOF + + # Add each message to PO file + for i in "${!default_keys[@]}"; do + local key="${default_keys[$i]}" + local default_value="${default_values[$i]}" + local translated_value="${!key}" + + # If the translation exists, add it to the PO file + echo "#: ${key}" >> "$po_file" + echo "msgid \"$default_value\"" >> "$po_file" + echo "msgstr \"$translated_value\"" >> "$po_file" + echo "" >> "$po_file" + done + + success_log "Created $po_file" + fi + fi + done + + # Create a POT (template) file + local pot_file="${export_dir}/template.pot" + + # Create POT header + cat > "$pot_file" << EOF +# Translation template for Export Trakt 4 Letterboxd +# Copyright (C) $(date +"%Y") Export Trakt 4 Letterboxd +# This file is distributed under the same license as the Export Trakt 4 Letterboxd package. +# +msgid "" +msgstr "" +"Project-Id-Version: Export Trakt 4 Letterboxd 1.0\\n" +"Report-Msgid-Bugs-To: \\n" +"POT-Creation-Date: $po_date\\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n" +"Last-Translator: FULL NAME \\n" +"Language-Team: LANGUAGE \\n" +"Language: \\n" +"MIME-Version: 1.0\\n" +"Content-Type: text/plain; charset=UTF-8\\n" +"Content-Transfer-Encoding: 8bit\\n" + +EOF + + # Add each message to POT file + for i in "${!default_keys[@]}"; do + local key="${default_keys[$i]}" + local default_value="${default_values[$i]}" + + echo "#: ${key}" >> "$pot_file" + echo "msgid \"$default_value\"" >> "$pot_file" + echo "msgstr \"\"" >> "$pot_file" + echo "" >> "$pot_file" + done + + success_log "Created POT template: $pot_file" + success_log "Translations successfully exported to PO format in $export_dir" +} + +# Function to import translations from external file +import_translations() { + local import_file="$1" + + if [ -z "$import_file" ] || [ ! -f "$import_file" ]; then + error_log "Please specify a valid import file" + return 1 + fi + + echo "Importing translations from $import_file..." + + # Determine the file format based on extension + local file_ext="${import_file##*.}" + + case "$file_ext" in + po) + import_po "$import_file" + ;; + json) + import_json "$import_file" + ;; + *) + error_log "Unsupported import format: $file_ext. Use '.po' or '.json' files." + return 1 + ;; + esac +} + +# Function to import from PO format +import_po() { + local import_file="$1" + + # Try to determine language from PO file + local lang=$(grep '"Language:' "$import_file" | sed 's/.*"Language: \([^\\]*\).*/\1/' | head -1) + + if [ -z "$lang" ]; then + error_log "Could not determine language from PO file" + read -p "Please specify the language code (e.g., 'fr', 'de'): " lang + + if [ -z "$lang" ] || [ ${#lang} -ne 2 ]; then + error_log "Invalid language code" + return 1 + fi + fi + + # Check if the language already exists + local lang_file="${LANG_DIR}/${lang}/LC_MESSAGES/messages.sh" + if [ ! -f "$lang_file" ]; then + warning_log "Language file not found: $lang_file" + read -p "Create a new language file? 
(y/N): " confirm + if [ "$confirm" = "y" ] || [ "$confirm" = "Y" ]; then + create_language "$lang" + else + error_log "Import cancelled" + return 1 + fi + fi + + # Load default language to get keys + if ! load_default_messages; then + return 1 + fi + + # Get default messages mapping + declare -A default_messages + for var in $(set | grep '^MSG_' | cut -d= -f1); do + default_messages["${!var}"]="$var" + done + + # Parse PO file and update language file + local temp_file="${lang_file}.new" + cp "$lang_file" "$temp_file" + + # Parse each message block in the PO file + local current_msgid="" + local current_msgstr="" + local in_msgid=false + local in_msgstr=false + + while IFS= read -r line; do + if [[ $line =~ ^msgid\ \"(.*)\"$ ]]; then + current_msgid="${BASH_REMATCH[1]}" + in_msgid=true + in_msgstr=false + elif [[ $line =~ ^\"(.*)\"$ ]] && [ "$in_msgid" = true ]; then + current_msgid="$current_msgid${BASH_REMATCH[1]}" + elif [[ $line =~ ^msgstr\ \"(.*)\"$ ]]; then + current_msgstr="${BASH_REMATCH[1]}" + in_msgid=false + in_msgstr=true + elif [[ $line =~ ^\"(.*)\"$ ]] && [ "$in_msgstr" = true ]; then + current_msgstr="$current_msgstr${BASH_REMATCH[1]}" + elif [[ -z "$line" ]] && [ -n "$current_msgid" ] && [ -n "$current_msgstr" ]; then + # End of a message block, update the translation + local var_name="${default_messages[$current_msgid]}" + if [ -n "$var_name" ]; then + # Escape quotes in the value + current_msgstr=$(echo "$current_msgstr" | sed 's/"/\\"/g') + + # Update the variable in the language file + sed -i.bak "s|^$var_name=.*|$var_name=\"$current_msgstr\"|" "$temp_file" + fi + + current_msgid="" + current_msgstr="" + in_msgid=false + in_msgstr=false + fi + done < "$import_file" + + # Handle the last message block + if [ -n "$current_msgid" ] && [ -n "$current_msgstr" ]; then + local var_name="${default_messages[$current_msgid]}" + if [ -n "$var_name" ]; then + # Escape quotes in the value + current_msgstr=$(echo "$current_msgstr" | sed 's/"/\\"/g') + + # Update the variable in the language file + sed -i.bak "s|^$var_name=.*|$var_name=\"$current_msgstr\"|" "$temp_file" + fi + fi + + # Replace the original file + mv "$temp_file" "$lang_file" + rm -f "${lang_file}.bak" + chmod +x "$lang_file" + + success_log "Imported translations from $import_file to $lang_file" +} + +# Function to import from JSON format +import_json() { + local import_file="$1" + + # Try to determine if this is a single language or multiple languages + local first_line=$(head -1 "$import_file") + + if [[ "$first_line" =~ \{\"[a-z]{2}\"\: ]]; then + # This is a multi-language JSON file + warning_log "Detected multi-language JSON file. Importing all languages..." 
+ + # Extract each language section + for lang in $(grep -o '"[a-z][a-z]":' "$import_file" | sed 's/"//g' | sed 's/://g'); do + echo "Importing language: $lang" + + # Extract the language section to a temporary file + local temp_json="/tmp/${lang}_import.json" + # Use a combination of sed and awk to extract the language section + sed -n "/\"$lang\":/,/^ \}/p" "$import_file" | sed '1s/^ "'"$lang"'": //' | sed '$s/ ,$//' > "$temp_json" + + # Import the language + import_json_language "$lang" "$temp_json" + rm -f "$temp_json" + done + else + # This is a single language JSON file + local lang="" + + # Try to determine language from filename + if [[ "$import_file" =~ ([a-z]{2})\.json$ ]]; then + lang="${BASH_REMATCH[1]}" + else + read -p "Please specify the language code for this JSON file (e.g., 'fr', 'de'): " lang + fi + + if [ -z "$lang" ] || [ ${#lang} -ne 2 ]; then + error_log "Invalid language code" + return 1 + fi + + import_json_language "$lang" "$import_file" + fi +} + +# Helper function to import a single language from JSON +import_json_language() { + local lang="$1" + local json_file="$2" + + # Check if the language already exists + local lang_file="${LANG_DIR}/${lang}/LC_MESSAGES/messages.sh" + if [ ! -f "$lang_file" ]; then + warning_log "Language file not found: $lang_file" + read -p "Create a new language file? (y/N): " confirm + if [ "$confirm" = "y" ] || [ "$confirm" = "Y" ]; then + create_language "$lang" + else + error_log "Import cancelled for language $lang" + return 1 + fi + fi + + # Create a temporary file for processing + local temp_file="${lang_file}.new" + cp "$lang_file" "$temp_file" + + # Parse each key-value pair in the JSON file + grep -oP '"MSG_[^"]+"\s*:\s*"[^"]*"' "$json_file" | while read -r line; do + local var_name=$(echo "$line" | grep -oP '"MSG_[^"]+"' | tr -d '"') + local var_value=$(echo "$line" | grep -oP ':\s*"\K[^"]*') + + # Escape quotes in the value + var_value=$(echo "$var_value" | sed 's/"/\\"/g') + + # Update the variable in the language file + if grep -q "^$var_name=" "$temp_file"; then + sed -i.bak "s|^$var_name=.*|$var_name=\"$var_value\"|" "$temp_file" + else + warning_log "Key $var_name not found in language file, skipping" + fi + done + + # Replace the original file + mv "$temp_file" "$lang_file" + rm -f "${lang_file}.bak" + chmod +x "$lang_file" + + success_log "Imported translations from $json_file to $lang_file" +} + +# Main script logic +case "$1" in + list) + list_languages + ;; + create) + create_language "$2" + ;; + update) + update_languages + ;; + status) + show_status + ;; + check) + check_translation_files + ;; + export) + export_translations "$2" + ;; + import) + import_translations "$2" + ;; + help|--help|-h) + show_help + ;; + *) + if [ -z "$1" ]; then + show_help + else + error_log "Unknown command: $1" + echo "Use '$0 help' to see available commands" + exit 1 + fi + ;; +esac + +exit 0 \ No newline at end of file diff --git a/pkg/api/trakt.go b/pkg/api/trakt.go deleted file mode 100644 index 9c04c71..0000000 --- a/pkg/api/trakt.go +++ /dev/null @@ -1,677 +0,0 @@ -package api - -import ( - "encoding/json" - "fmt" - "net/http" - "net/url" - "strconv" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" -) - -const ( - maxRetries = 3 - retryInterval = time.Second -) - -// MovieIDs represents the various IDs associated with a movie -type MovieIDs struct { - Trakt int `json:"trakt"` - TMDB int `json:"tmdb"` - IMDB string `json:"imdb"` - Slug string 
`json:"slug"` -} - -// MovieInfo represents the basic movie information -type MovieInfo struct { - Title string `json:"title"` - Year int `json:"year"` - IDs MovieIDs `json:"ids"` - Tagline string `json:"tagline,omitempty"` - Overview string `json:"overview,omitempty"` - Released string `json:"released,omitempty"` - Runtime int `json:"runtime,omitempty"` - Country string `json:"country,omitempty"` - Updated string `json:"updated_at,omitempty"` - Trailer string `json:"trailer,omitempty"` - Homepage string `json:"homepage,omitempty"` - Rating float64 `json:"rating,omitempty"` - Votes int `json:"votes,omitempty"` - Comment int `json:"comment_count,omitempty"` - Genres []string `json:"genres,omitempty"` -} - -// Movie represents a watched movie with its metadata -type Movie struct { - Movie MovieInfo `json:"movie"` - LastWatchedAt string `json:"last_watched_at"` - Plays int `json:"plays,omitempty"` -} - -// CollectionMovie represents a movie in a collection -type CollectionMovie struct { - Movie MovieInfo `json:"movie"` - CollectedAt string `json:"collected_at"` -} - -// ShowIDs represents the various IDs associated with a show -type ShowIDs struct { - Trakt int `json:"trakt"` - TMDB int `json:"tmdb"` - IMDB string `json:"imdb"` - Slug string `json:"slug"` - TVDB int `json:"tvdb"` -} - -// ShowInfo represents the basic show information -type ShowInfo struct { - Title string `json:"title"` - Year int `json:"year"` - IDs ShowIDs `json:"ids"` - Overview string `json:"overview,omitempty"` - FirstAired string `json:"first_aired,omitempty"` - Runtime int `json:"runtime,omitempty"` - Network string `json:"network,omitempty"` - Country string `json:"country,omitempty"` - Updated string `json:"updated_at,omitempty"` - Trailer string `json:"trailer,omitempty"` - Homepage string `json:"homepage,omitempty"` - Status string `json:"status,omitempty"` - Rating float64 `json:"rating,omitempty"` - Votes int `json:"votes,omitempty"` - Comment int `json:"comment_count,omitempty"` - Genres []string `json:"genres,omitempty"` -} - -// EpisodeIDs represents the various IDs associated with an episode -type EpisodeIDs struct { - Trakt int `json:"trakt"` - TMDB int `json:"tmdb"` - TVDB int `json:"tvdb"` -} - -// EpisodeInfo represents the basic episode information -type EpisodeInfo struct { - Season int `json:"season"` - Number int `json:"number"` - Title string `json:"title"` - IDs EpisodeIDs `json:"ids"` - Overview string `json:"overview,omitempty"` - FirstAired string `json:"first_aired,omitempty"` - Updated string `json:"updated_at,omitempty"` - Rating float64 `json:"rating,omitempty"` - Votes int `json:"votes,omitempty"` - Comment int `json:"comment_count,omitempty"` -} - -// WatchedShow represents a watched show with its metadata -type WatchedShow struct { - Show ShowInfo `json:"show"` - Seasons []ShowSeason `json:"seasons"` - LastWatchedAt string `json:"last_watched_at"` - Plays int `json:"plays,omitempty"` -} - -// ShowSeason represents a season of a show -type ShowSeason struct { - Number int `json:"number"` - Episodes []EpisodeInfo `json:"episodes"` -} - -// Client represents a Trakt API client -type Client struct { - config *config.Config - logger logger.Logger - httpClient *http.Client -} - -// NewClient creates a new Trakt API client -func NewClient(cfg *config.Config, log logger.Logger) *Client { - return &Client{ - config: cfg, - logger: log, - httpClient: &http.Client{ - Timeout: time.Second * 30, - }, - } -} - -// makeRequest makes an HTTP request with retries -func (c *Client) makeRequest(req 
*http.Request) (*http.Response, error) { - var lastErr error - for attempt := 0; attempt < maxRetries; attempt++ { - if attempt > 0 { - c.logger.Warn("api.retrying_request", map[string]interface{}{ - "attempt": attempt + 1, - "max": maxRetries, - }) - time.Sleep(retryInterval * time.Duration(attempt)) - } - - resp, err := c.httpClient.Do(req) - if err != nil { - lastErr = fmt.Errorf("request failed: %w", err) - continue - } - - // Only retry on server errors (5xx) - if resp.StatusCode >= 500 { - resp.Body.Close() - lastErr = fmt.Errorf("server error: %d", resp.StatusCode) - continue - } - - return resp, nil - } - - return nil, fmt.Errorf("max retries exceeded: %w", lastErr) -} - -// addExtendedInfo adds the extended parameter to the URL if it's configured -func (c *Client) addExtendedInfo(endpoint string) string { - // Safety checks - if c == nil || c.config == nil { - return endpoint - } - - if c.config.Trakt.ExtendedInfo == "" { - return endpoint - } - - baseURL, err := url.Parse(endpoint) - if err != nil { - if c.logger != nil { - c.logger.Warn("api.url_parse_error", map[string]interface{}{ - "error": err.Error(), - }) - } - return endpoint - } - - q := baseURL.Query() - q.Set("extended", c.config.Trakt.ExtendedInfo) - baseURL.RawQuery = q.Encode() - return baseURL.String() -} - -// GetWatchedMovies retrieves the list of watched movies from Trakt -func (c *Client) GetWatchedMovies() ([]Movie, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/watched/movies") - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, errorResp["error"]) - } - - // Parse response - var movies []Movie - if err := json.NewDecoder(resp.Body).Decode(&movies); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - return movies, nil -} - -// GetCollectionMovies retrieves the list of movies in the user's collection from Trakt -func (c *Client) GetCollectionMovies() ([]CollectionMovie, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/collection/movies") - req, 
err := http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, errorResp["error"]) - } - - // Parse response - var movies []CollectionMovie - if err := json.NewDecoder(resp.Body).Decode(&movies); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - c.logger.Info("api.collection_movies_fetched", map[string]interface{}{ - "count": len(movies), - }) - return movies, nil -} - -// GetWatchedShows retrieves the list of watched shows from Trakt -func (c *Client) GetWatchedShows() ([]WatchedShow, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/watched/shows") - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", 
resp.StatusCode, errorResp["error"]) - } - - // Parse response - var shows []WatchedShow - if err := json.NewDecoder(resp.Body).Decode(&shows); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - c.logger.Info("api.watched_shows_fetched", map[string]interface{}{ - "count": len(shows), - }) - return shows, nil -} - -// Rating represents a user rating for movies -type Rating struct { - Movie MovieInfo `json:"movie"` - RatedAt string `json:"rated_at"` - Rating float64 `json:"rating"` -} - -// ShowRating represents a user rating for shows -type ShowRating struct { - Show ShowInfo `json:"show"` - RatedAt string `json:"rated_at"` - Rating float64 `json:"rating"` -} - -// EpisodeRating represents a user rating for episodes -type EpisodeRating struct { - Show ShowInfo `json:"show"` - Episode EpisodeInfo `json:"episode"` - RatedAt string `json:"rated_at"` - Rating float64 `json:"rating"` -} - -// WatchlistMovie represents a movie in the user's watchlist -type WatchlistMovie struct { - Movie MovieInfo `json:"movie"` - ListedAt string `json:"listed_at"` - Notes string `json:"notes,omitempty"` -} - -// GetRatings retrieves the user's ratings from Trakt -func (c *Client) GetRatings() ([]Rating, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/ratings/movies") - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, errorResp["error"]) - } - - // Parse response - var ratings []Rating - if err := json.NewDecoder(resp.Body).Decode(&ratings); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - c.logger.Info("api.ratings_fetched", map[string]interface{}{ - "count": len(ratings), - }) - return ratings, nil -} - -// GetWatchlist retrieves the user's movie watchlist from Trakt -func (c *Client) GetWatchlist() ([]WatchlistMovie, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/watchlist/movies") - req, err := 
http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, errorResp["error"]) - } - - // Parse response - var watchlist []WatchlistMovie - if err := json.NewDecoder(resp.Body).Decode(&watchlist); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - c.logger.Info("api.watchlist_fetched", map[string]interface{}{ - "count": len(watchlist), - }) - return watchlist, nil -} - -// GetShowRatings retrieves the user's TV show ratings from Trakt -func (c *Client) GetShowRatings() ([]ShowRating, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/ratings/shows") - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", 
resp.StatusCode, errorResp["error"]) - } - - // Parse response - var ratings []ShowRating - if err := json.NewDecoder(resp.Body).Decode(&ratings); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - c.logger.Info("api.show_ratings_fetched", map[string]interface{}{ - "count": len(ratings), - }) - return ratings, nil -} - -// GetEpisodeRatings retrieves the user's TV episode ratings from Trakt -func (c *Client) GetEpisodeRatings() ([]EpisodeRating, error) { - endpoint := c.addExtendedInfo(c.config.Trakt.APIBaseURL + "/sync/ratings/episodes") - req, err := http.NewRequest("GET", endpoint, nil) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to create request: %w", err) - } - - // Add required headers - req.Header.Set("Content-Type", "application/json") - req.Header.Set("trakt-api-version", "2") - req.Header.Set("trakt-api-key", c.config.Trakt.ClientID) - req.Header.Set("Authorization", "Bearer "+c.config.Trakt.AccessToken) - - resp, err := c.makeRequest(req) - if err != nil { - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to execute request: %w", err) - } - defer resp.Body.Close() - - // Handle rate limiting - if limit := resp.Header.Get("X-Ratelimit-Remaining"); limit != "" { - remaining, _ := strconv.Atoi(limit) - if remaining < 100 { - c.logger.Warn("api.rate_limit_warning", map[string]interface{}{ - "remaining": remaining, - }) - } - } - - // Check response status - if resp.StatusCode != http.StatusOK { - var errorResp map[string]string - if err := json.NewDecoder(resp.Body).Decode(&errorResp); err != nil { - errorResp = map[string]string{"error": "unknown error"} - } - c.logger.Error("errors.api_request_failed", map[string]interface{}{ - "status": resp.StatusCode, - "error": errorResp["error"], - }) - return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, errorResp["error"]) - } - - // Parse response - var ratings []EpisodeRating - if err := json.NewDecoder(resp.Body).Decode(&ratings); err != nil { - c.logger.Error("errors.api_response_parse_failed", map[string]interface{}{ - "error": err.Error(), - }) - return nil, fmt.Errorf("failed to parse response: %w", err) - } - - c.logger.Info("api.episode_ratings_fetched", map[string]interface{}{ - "count": len(ratings), - }) - return ratings, nil -} - -// GetConfig returns the client's configuration -func (c *Client) GetConfig() *config.Config { - return c.config -} \ No newline at end of file diff --git a/pkg/api/trakt_test.go b/pkg/api/trakt_test.go deleted file mode 100644 index a23d357..0000000 --- a/pkg/api/trakt_test.go +++ /dev/null @@ -1,1135 +0,0 @@ -package api - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "testing" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" - "github.com/stretchr/testify/assert" -) - -// MockLogger implements the logger.Logger interface for testing -type MockLogger struct { - lastMessage string - lastData map[string]interface{} -} - -func (m *MockLogger) Info(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Infof(messageID string, data 
map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) Error(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Errorf(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) Debug(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Debugf(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) Warn(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Warnf(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) SetLogLevel(level string) { - // No-op for testing -} - -func (m *MockLogger) SetLogFile(filePath string) error { - // No-op for testing - return nil -} - -func (m *MockLogger) SetTranslator(t logger.Translator) { - // No-op for testing -} - -// TestNewClient tests client initialization -func TestNewClient(t *testing.T) { - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: "https://api.trakt.tv", - }, - } - log := &MockLogger{} - - client := NewClient(cfg, log) - if client == nil { - t.Error("Expected non-nil client") - return - } - - // Now safely check fields since we know client is not nil - if client.config == nil { - t.Error("Expected config to be set") - } else if client.config != cfg { - t.Error("Expected config to be set correctly") - } - - if client.logger == nil { - t.Error("Expected logger to be set") - } else if client.logger != log { - t.Error("Expected logger to be set correctly") - } - - if client.httpClient == nil { - t.Error("Expected non-nil HTTP client") - } -} - -// TestGetWatchedMovies tests the GetWatchedMovies endpoint -func TestGetWatchedMovies(t *testing.T) { - // Create a test server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Verify request - if r.URL.Path != "/sync/watched/movies" { - t.Errorf("Expected path '/sync/watched/movies', got '%s'", r.URL.Path) - } - if r.Header.Get("Content-Type") != "application/json" { - t.Errorf("Expected Content-Type 'application/json', got '%s'", r.Header.Get("Content-Type")) - } - if r.Header.Get("trakt-api-key") != "test_client_id" { - t.Errorf("Expected client ID header, got '%s'", r.Header.Get("trakt-api-key")) - } - if r.Header.Get("Authorization") != "Bearer test_access_token" { - t.Errorf("Expected auth header, got '%s'", r.Header.Get("Authorization")) - } - - // Return test response - movies := []Movie{ - { - Movie: MovieInfo{ - Title: "Test Movie", - Year: 2024, - IDs: MovieIDs{ - Trakt: 12345, - TMDB: 67890, - IMDB: "tt0123456", - Slug: "test-movie-2024", - }, - }, - LastWatchedAt: time.Now().Format(time.RFC3339), - }, - } - json.NewEncoder(w).Encode(movies) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test 
successful request - movies, err := client.GetWatchedMovies() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if len(movies) != 1 { - t.Errorf("Expected 1 movie, got %d", len(movies)) - } - if movies[0].Movie.Title != "Test Movie" { - t.Errorf("Expected movie title 'Test Movie', got '%s'", movies[0].Movie.Title) - } -} - -// TestGetWatchedMoviesError tests error handling in GetWatchedMovies -func TestGetWatchedMoviesError(t *testing.T) { - // Create a test server that returns an error - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - json.NewEncoder(w).Encode(map[string]string{ - "error": "Invalid access token", - }) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "invalid_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test error handling - movies, err := client.GetWatchedMovies() - if err == nil { - t.Error("Expected error but got none") - } - if movies != nil { - t.Error("Expected nil movies on error") - } - if log.lastMessage != "errors.api_request_failed" { - t.Errorf("Expected error message logged, got '%s'", log.lastMessage) - } -} - -// TestRateLimiting tests the rate limiting functionality -func TestRateLimiting(t *testing.T) { - // Create a test server that returns rate limit headers - requestCount := 0 - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - requestCount++ - w.Header().Set("X-Ratelimit-Limit", "1000") - w.Header().Set("X-Ratelimit-Remaining", "999") - w.Header().Set("X-Ratelimit-Reset", fmt.Sprintf("%d", time.Now().Add(time.Hour).Unix())) - json.NewEncoder(w).Encode([]Movie{}) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Make multiple requests in quick succession - for i := 0; i < 3; i++ { - _, err := client.GetWatchedMovies() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - } - - // Verify rate limiting headers were processed - if requestCount != 3 { - t.Errorf("Expected 3 requests, got %d", requestCount) - } -} - -// TestRetryMechanism tests the retry mechanism for failed requests -func TestRetryMechanism(t *testing.T) { - failCount := 0 - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if failCount < 2 { - failCount++ - w.WriteHeader(http.StatusInternalServerError) - return - } - json.NewEncoder(w).Encode([]Movie{}) - })) - defer server.Close() - - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test that request succeeds after retries - movies, err := client.GetWatchedMovies() - if err != nil { - t.Errorf("Unexpected error after retries: %v", err) - } - if movies == nil { - t.Error("Expected non-nil movies after retries") - } - if failCount != 2 { - t.Errorf("Expected 2 failures before success, got %d", failCount) - } -} - -// TestResponseParsing tests parsing of 
various response formats -func TestResponseParsing(t *testing.T) { - testCases := []struct { - name string - response string - validate func(*testing.T, []Movie) - }{ - { - name: "full movie details", - response: `[{ - "movie": { - "title": "Test Movie", - "year": 2024, - "ids": { - "trakt": 12345, - "tmdb": 67890, - "imdb": "tt0123456", - "slug": "test-movie-2024" - } - }, - "last_watched_at": "2024-03-26T12:00:00Z" - }]`, - validate: func(t *testing.T, movies []Movie) { - if len(movies) != 1 { - t.Fatalf("Expected 1 movie, got %d", len(movies)) - } - m := movies[0] - if m.Movie.Title != "Test Movie" { - t.Errorf("Expected title 'Test Movie', got '%s'", m.Movie.Title) - } - if m.Movie.Year != 2024 { - t.Errorf("Expected year 2024, got %d", m.Movie.Year) - } - if m.Movie.IDs.Trakt != 12345 { - t.Errorf("Expected Trakt ID 12345, got %d", m.Movie.IDs.Trakt) - } - }, - }, - { - name: "empty response", - response: "[]", - validate: func(t *testing.T, movies []Movie) { - if len(movies) != 0 { - t.Errorf("Expected empty movie list, got %d movies", len(movies)) - } - }, - }, - { - name: "minimal movie details", - response: `[{ - "movie": { - "title": "Test Movie", - "year": 2024 - }, - "last_watched_at": "2024-03-26T12:00:00Z" - }]`, - validate: func(t *testing.T, movies []Movie) { - if len(movies) != 1 { - t.Fatalf("Expected 1 movie, got %d", len(movies)) - } - m := movies[0] - if m.Movie.Title != "Test Movie" { - t.Errorf("Expected title 'Test Movie', got '%s'", m.Movie.Title) - } - if m.Movie.Year != 2024 { - t.Errorf("Expected year 2024, got %d", m.Movie.Year) - } - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(tc.response)) - })) - defer server.Close() - - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - movies, err := client.GetWatchedMovies() - if err != nil { - t.Fatalf("Unexpected error: %v", err) - } - tc.validate(t, movies) - }) - } -} - -func TestGetCollectionMovies(t *testing.T) { - // Set up mock HTTP server - mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Check request method and path - assert.Equal(t, "GET", r.Method) - assert.Equal(t, "/sync/collection/movies", r.URL.Path) - - // Check required headers - assert.Equal(t, "application/json", r.Header.Get("Content-Type")) - assert.Equal(t, "2", r.Header.Get("trakt-api-version")) - assert.Equal(t, "test-client-id", r.Header.Get("trakt-api-key")) - assert.Equal(t, "Bearer test-token", r.Header.Get("Authorization")) - - // Set rate limiting headers - w.Header().Set("X-Ratelimit-Remaining", "150") - - // Return mock response - w.WriteHeader(http.StatusOK) - w.Write([]byte(`[ - { - "movie": { - "title": "The Dark Knight", - "year": 2008, - "ids": { - "trakt": 16, - "slug": "the-dark-knight-2008", - "imdb": "tt0468569", - "tmdb": 155 - } - }, - "collected_at": "2023-01-15T23:40:30.000Z" - }, - { - "movie": { - "title": "Inception", - "year": 2010, - "ids": { - "trakt": 417, - "slug": "inception-2010", - "imdb": "tt1375666", - "tmdb": 27205 - } - }, - "collected_at": "2023-03-20T18:25:43.000Z" - } - ]`)) - })) - defer mockServer.Close() - - // Create client with mock server URL - mockConfig := &config.Config{ - Trakt: config.TraktConfig{ - 
ClientID: "test-client-id", - AccessToken: "test-token", - APIBaseURL: mockServer.URL, - }, - } - mockLogger := &MockLogger{} - client := NewClient(mockConfig, mockLogger) - - // Call the method to test - movies, err := client.GetCollectionMovies() - - // Assert no error - assert.NoError(t, err) - - // Assert movies were correctly parsed - assert.Equal(t, 2, len(movies)) - - // Assert first movie details - assert.Equal(t, "The Dark Knight", movies[0].Movie.Title) - assert.Equal(t, 2008, movies[0].Movie.Year) - assert.Equal(t, 16, movies[0].Movie.IDs.Trakt) - assert.Equal(t, "tt0468569", movies[0].Movie.IDs.IMDB) - assert.Equal(t, "2023-01-15T23:40:30.000Z", movies[0].CollectedAt) - - // Assert second movie details - assert.Equal(t, "Inception", movies[1].Movie.Title) - assert.Equal(t, 2010, movies[1].Movie.Year) - assert.Equal(t, 417, movies[1].Movie.IDs.Trakt) - assert.Equal(t, "tt1375666", movies[1].Movie.IDs.IMDB) - assert.Equal(t, "2023-03-20T18:25:43.000Z", movies[1].CollectedAt) -} - -func TestGetCollectionMoviesError(t *testing.T) { - // Set up mock server that returns an error - mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - w.Write([]byte(`{"error": "Invalid OAuth token"}`)) - })) - defer mockServer.Close() - - // Create client with mock server URL - mockConfig := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test-client-id", - AccessToken: "invalid-token", - APIBaseURL: mockServer.URL, - }, - } - mockLogger := &MockLogger{} - client := NewClient(mockConfig, mockLogger) - - // Call the method to test - movies, err := client.GetCollectionMovies() - - // Assert error - assert.Error(t, err) - assert.Contains(t, err.Error(), "API request failed with status 401") - assert.Nil(t, movies) -} - -// TestGetWatchedShows tests the GetWatchedShows endpoint -func TestGetWatchedShows(t *testing.T) { - // Create a test server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Verify request - if r.URL.Path != "/sync/watched/shows" { - t.Errorf("Expected path '/sync/watched/shows', got '%s'", r.URL.Path) - } - if r.Header.Get("Content-Type") != "application/json" { - t.Errorf("Expected Content-Type 'application/json', got '%s'", r.Header.Get("Content-Type")) - } - if r.Header.Get("trakt-api-key") != "test_client_id" { - t.Errorf("Expected client ID header, got '%s'", r.Header.Get("trakt-api-key")) - } - if r.Header.Get("Authorization") != "Bearer test_access_token" { - t.Errorf("Expected auth header, got '%s'", r.Header.Get("Authorization")) - } - - // Return test response - shows := []WatchedShow{ - { - Show: ShowInfo{ - Title: "Test Show", - Year: 2024, - IDs: ShowIDs{ - Trakt: 12345, - TMDB: 67890, - IMDB: "tt0123456", - Slug: "test-show-2024", - }, - }, - Seasons: []ShowSeason{ - { - Number: 1, - Episodes: []EpisodeInfo{ - { - Number: 1, - IDs: EpisodeIDs{ - Trakt: 12345, - }, - }, - }, - }, - }, - LastWatchedAt: time.Now().Format(time.RFC3339), - }, - } - json.NewEncoder(w).Encode(shows) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Verify client is properly initialized - if client == nil { - t.Fatal("Expected non-nil client") - } - if client.config == nil { - 
t.Fatal("Expected non-nil config") - } - if client.logger == nil { - t.Fatal("Expected non-nil logger") - } - if client.httpClient == nil { - t.Fatal("Expected non-nil HTTP client") - } - - // Test successful request - shows, err := client.GetWatchedShows() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if len(shows) != 1 { - t.Fatalf("Expected 1 show, got %d", len(shows)) - } - if shows[0].Show.Title != "Test Show" { - t.Errorf("Expected show title 'Test Show', got '%s'", shows[0].Show.Title) - } - if len(shows[0].Seasons) != 1 { - t.Errorf("Expected 1 season, got %d", len(shows[0].Seasons)) - } - - // Verify the success message was logged - assert.Equal(t, "api.watched_shows_fetched", log.lastMessage) - assert.Equal(t, 1, log.lastData["count"]) -} - -func TestGetWatchedShowsError(t *testing.T) { - // Create a test server that returns an error - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - json.NewEncoder(w).Encode(map[string]string{ - "error": "Invalid access token", - }) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "invalid_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Verify client is properly initialized - if client == nil { - t.Fatal("Expected non-nil client") - } - if client.config == nil { - t.Fatal("Expected non-nil config") - } - if client.logger == nil { - t.Fatal("Expected non-nil logger") - } - if client.httpClient == nil { - t.Fatal("Expected non-nil HTTP client") - } - - // Test error handling - shows, err := client.GetWatchedShows() - if err == nil { - t.Error("Expected error but got none") - } - if shows != nil { - t.Error("Expected nil shows on error") - } - if log.lastMessage != "errors.api_request_failed" { - t.Errorf("Expected error message logged, got '%s'", log.lastMessage) - } -} - -// TestGetRatings tests the GetRatings endpoint -func TestGetRatings(t *testing.T) { - // Create a test server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Verify request - if r.URL.Path != "/sync/ratings/movies" { - t.Errorf("Expected path '/sync/ratings/movies', got '%s'", r.URL.Path) - } - if r.Header.Get("Content-Type") != "application/json" { - t.Errorf("Expected Content-Type 'application/json', got '%s'", r.Header.Get("Content-Type")) - } - if r.Header.Get("trakt-api-key") != "test_client_id" { - t.Errorf("Expected client ID header, got '%s'", r.Header.Get("trakt-api-key")) - } - if r.Header.Get("Authorization") != "Bearer test_access_token" { - t.Errorf("Expected auth header, got '%s'", r.Header.Get("Authorization")) - } - - // Return test response - ratings := []Rating{ - { - Movie: MovieInfo{ - Title: "Test Movie", - Year: 2024, - IDs: MovieIDs{ - Trakt: 12345, - TMDB: 67890, - IMDB: "tt0123456", - Slug: "test-movie-2024", - }, - }, - RatedAt: time.Now().Format(time.RFC3339), - Rating: 8.5, - }, - } - json.NewEncoder(w).Encode(ratings) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test successful request - ratings, err := 
client.GetRatings() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if len(ratings) != 1 { - t.Fatalf("Expected 1 rating, got %d", len(ratings)) - } - if ratings[0].Movie.Title != "Test Movie" { - t.Errorf("Expected movie title 'Test Movie', got '%s'", ratings[0].Movie.Title) - } - if ratings[0].Rating != 8.5 { - t.Errorf("Expected rating 8.5, got %f", ratings[0].Rating) - } -} - -// TestGetRatingsError tests error handling in GetRatings -func TestGetRatingsError(t *testing.T) { - // Create a test server that returns an error - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - json.NewEncoder(w).Encode(map[string]string{ - "error": "Invalid access token", - }) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "invalid_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test error handling - ratings, err := client.GetRatings() - if err == nil { - t.Error("Expected error but got none") - } - if ratings != nil { - t.Error("Expected nil ratings on error") - } - if log.lastMessage != "errors.api_request_failed" { - t.Errorf("Expected error message logged, got '%s'", log.lastMessage) - } -} - -// TestGetWatchlist tests the GetWatchlist endpoint -func TestGetWatchlist(t *testing.T) { - // Create a test server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Verify request - if r.URL.Path != "/sync/watchlist/movies" { - t.Errorf("Expected path '/sync/watchlist/movies', got '%s'", r.URL.Path) - } - if r.Header.Get("Content-Type") != "application/json" { - t.Errorf("Expected Content-Type 'application/json', got '%s'", r.Header.Get("Content-Type")) - } - if r.Header.Get("trakt-api-key") != "test_client_id" { - t.Errorf("Expected client ID header, got '%s'", r.Header.Get("trakt-api-key")) - } - if r.Header.Get("Authorization") != "Bearer test_access_token" { - t.Errorf("Expected auth header, got '%s'", r.Header.Get("Authorization")) - } - - // Return test response - watchlist := []WatchlistMovie{ - { - Movie: MovieInfo{ - Title: "Test Movie", - Year: 2024, - IDs: MovieIDs{ - Trakt: 12345, - TMDB: 67890, - IMDB: "tt0123456", - Slug: "test-movie-2024", - }, - }, - ListedAt: time.Now().Format(time.RFC3339), - Notes: "Test notes", - }, - } - json.NewEncoder(w).Encode(watchlist) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test successful request - watchlist, err := client.GetWatchlist() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if len(watchlist) != 1 { - t.Fatalf("Expected 1 watchlist item, got %d", len(watchlist)) - } - if watchlist[0].Movie.Title != "Test Movie" { - t.Errorf("Expected movie title 'Test Movie', got '%s'", watchlist[0].Movie.Title) - } - if watchlist[0].Notes != "Test notes" { - t.Errorf("Expected notes 'Test notes', got '%s'", watchlist[0].Notes) - } -} - -// TestGetWatchlistError tests error handling in GetWatchlist -func TestGetWatchlistError(t *testing.T) { - // Create a test server that returns an error - server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - json.NewEncoder(w).Encode(map[string]string{ - "error": "Invalid access token", - }) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "invalid_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test error handling - watchlist, err := client.GetWatchlist() - if err == nil { - t.Error("Expected error but got none") - } - if watchlist != nil { - t.Error("Expected nil watchlist on error") - } - if log.lastMessage != "errors.api_request_failed" { - t.Errorf("Expected error message logged, got '%s'", log.lastMessage) - } -} - -func TestAddExtendedInfo(t *testing.T) { - // Test with extended info - cfg := &config.Config{ - Trakt: config.TraktConfig{ - APIBaseURL: "https://api.trakt.tv", - ExtendedInfo: "full", - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - url := client.addExtendedInfo("https://api.trakt.tv/movies/popular") - assert.Equal(t, "https://api.trakt.tv/movies/popular?extended=full", url) - - // Test without extended info - cfg = &config.Config{ - Trakt: config.TraktConfig{ - APIBaseURL: "https://api.trakt.tv", - ExtendedInfo: "", - }, - } - client = NewClient(cfg, log) - - url = client.addExtendedInfo("https://api.trakt.tv/movies/popular") - assert.Equal(t, "https://api.trakt.tv/movies/popular", url) - - // Test with invalid URL - cfg = &config.Config{ - Trakt: config.TraktConfig{ - APIBaseURL: "https://api.trakt.tv", - ExtendedInfo: "full", - }, - } - client = NewClient(cfg, log) - - url = client.addExtendedInfo("://invalid") - assert.Equal(t, "://invalid", url) -} - -// TestGetShowRatings tests the GetShowRatings endpoint -func TestGetShowRatings(t *testing.T) { - // Create a test server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Verify request - if r.URL.Path != "/sync/ratings/shows" { - t.Errorf("Expected path '/sync/ratings/shows', got '%s'", r.URL.Path) - } - if r.Header.Get("Content-Type") != "application/json" { - t.Errorf("Expected Content-Type 'application/json', got '%s'", r.Header.Get("Content-Type")) - } - if r.Header.Get("trakt-api-key") != "test_client_id" { - t.Errorf("Expected client ID header, got '%s'", r.Header.Get("trakt-api-key")) - } - if r.Header.Get("Authorization") != "Bearer test_access_token" { - t.Errorf("Expected auth header, got '%s'", r.Header.Get("Authorization")) - } - - // Set rate-limiting header for coverage - w.Header().Set("X-Ratelimit-Remaining", "50") - - // Return test response - ratings := []ShowRating{ - { - Show: ShowInfo{ - Title: "Test Show", - Year: 2020, - IDs: ShowIDs{ - Trakt: 12345, - TMDB: 67890, - IMDB: "tt0123456", - TVDB: 98765, - Slug: "test-show-2020", - }, - }, - RatedAt: time.Now().Format(time.RFC3339), - Rating: 8.5, - }, - } - json.NewEncoder(w).Encode(ratings) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test successful request - ratings, err := client.GetShowRatings() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if 
len(ratings) != 1 { - t.Errorf("Expected 1 show rating, got %d", len(ratings)) - } - if ratings[0].Show.Title != "Test Show" { - t.Errorf("Expected show title 'Test Show', got '%s'", ratings[0].Show.Title) - } - if ratings[0].Rating != 8.5 { - t.Errorf("Expected rating 8.5, got %f", ratings[0].Rating) - } -} - -// TestGetShowRatingsError tests error handling in GetShowRatings -func TestGetShowRatingsError(t *testing.T) { - // Create a test server that returns an error - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - json.NewEncoder(w).Encode(map[string]string{ - "error": "Invalid access token", - }) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "invalid_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test error handling - ratings, err := client.GetShowRatings() - if err == nil { - t.Error("Expected error but got none") - } - if ratings != nil { - t.Errorf("Expected nil ratings, got %v", ratings) - } -} - -// TestGetEpisodeRatings tests the GetEpisodeRatings endpoint -func TestGetEpisodeRatings(t *testing.T) { - // Create a test server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Verify request - if r.URL.Path != "/sync/ratings/episodes" { - t.Errorf("Expected path '/sync/ratings/episodes', got '%s'", r.URL.Path) - } - if r.Header.Get("Content-Type") != "application/json" { - t.Errorf("Expected Content-Type 'application/json', got '%s'", r.Header.Get("Content-Type")) - } - if r.Header.Get("trakt-api-key") != "test_client_id" { - t.Errorf("Expected client ID header, got '%s'", r.Header.Get("trakt-api-key")) - } - if r.Header.Get("Authorization") != "Bearer test_access_token" { - t.Errorf("Expected auth header, got '%s'", r.Header.Get("Authorization")) - } - - // Set rate-limiting header for coverage - w.Header().Set("X-Ratelimit-Remaining", "50") - - // Return test response - ratings := []EpisodeRating{ - { - Show: ShowInfo{ - Title: "Test Show", - Year: 2020, - IDs: ShowIDs{ - Trakt: 12345, - TMDB: 67890, - IMDB: "tt0123456", - TVDB: 98765, - Slug: "test-show-2020", - }, - }, - Episode: EpisodeInfo{ - Season: 1, - Number: 2, - Title: "Test Episode", - IDs: EpisodeIDs{ - Trakt: 54321, - TMDB: 9876, - TVDB: 1234, - }, - }, - RatedAt: time.Now().Format(time.RFC3339), - Rating: 9.0, - }, - } - json.NewEncoder(w).Encode(ratings) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test successful request - ratings, err := client.GetEpisodeRatings() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if len(ratings) != 1 { - t.Errorf("Expected 1 episode rating, got %d", len(ratings)) - } - if ratings[0].Show.Title != "Test Show" { - t.Errorf("Expected show title 'Test Show', got '%s'", ratings[0].Show.Title) - } - if ratings[0].Episode.Title != "Test Episode" { - t.Errorf("Expected episode title 'Test Episode', got '%s'", ratings[0].Episode.Title) - } - if ratings[0].Rating != 9.0 { - t.Errorf("Expected rating 9.0, got %f", ratings[0].Rating) - } -} - -// 
TestGetEpisodeRatingsError tests error handling in GetEpisodeRatings -func TestGetEpisodeRatingsError(t *testing.T) { - // Create a test server that returns an error - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusUnauthorized) - json.NewEncoder(w).Encode(map[string]string{ - "error": "Invalid access token", - }) - })) - defer server.Close() - - // Create client with test server URL - cfg := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "invalid_token", - APIBaseURL: server.URL, - }, - } - log := &MockLogger{} - client := NewClient(cfg, log) - - // Test error handling - ratings, err := client.GetEpisodeRatings() - if err == nil { - t.Error("Expected error but got none") - } - if ratings != nil { - t.Errorf("Expected nil ratings, got %v", ratings) - } -} - -// TestGetConfig tests the GetConfig method -func TestGetConfig(t *testing.T) { - // Create client with test config - expectedConfig := &config.Config{ - Trakt: config.TraktConfig{ - ClientID: "test_client_id", - ClientSecret: "test_client_secret", - AccessToken: "test_access_token", - APIBaseURL: "https://api.trakt.tv", - }, - } - log := &MockLogger{} - client := NewClient(expectedConfig, log) - - // Test GetConfig - returnedConfig := client.GetConfig() - if returnedConfig != expectedConfig { - t.Errorf("Expected config to be the same as what was passed in") - } -} \ No newline at end of file diff --git a/pkg/config/config.go b/pkg/config/config.go deleted file mode 100644 index b8b4ebf..0000000 --- a/pkg/config/config.go +++ /dev/null @@ -1,154 +0,0 @@ -package config - -import ( - "fmt" - - "github.com/BurntSushi/toml" -) - -// Config holds all configuration settings -type Config struct { - Trakt TraktConfig `toml:"trakt"` - Letterboxd LetterboxdConfig `toml:"letterboxd"` - Export ExportConfig `toml:"export"` - Logging LoggingConfig `toml:"logging"` - I18n I18nConfig `toml:"i18n"` -} - -// TraktConfig holds Trakt.tv API configuration -type TraktConfig struct { - ClientID string `toml:"client_id"` - ClientSecret string `toml:"client_secret"` - AccessToken string `toml:"access_token"` - APIBaseURL string `toml:"api_base_url"` - ExtendedInfo string `toml:"extended_info"` -} - -// LetterboxdConfig holds Letterboxd export configuration -type LetterboxdConfig struct { - ExportDir string `toml:"export_dir"` - WatchedFilename string `toml:"watched_filename"` - CollectionFilename string `toml:"collection_filename"` - ShowsFilename string `toml:"shows_filename"` - RatingsFilename string `toml:"ratings_filename"` - WatchlistFilename string `toml:"watchlist_filename"` - LetterboxdImportFilename string `toml:"letterboxd_import_filename"` -} - -// ExportConfig holds export settings -type ExportConfig struct { - Format string `toml:"format"` - DateFormat string `toml:"date_format"` - Timezone string `toml:"timezone"` -} - -// LoggingConfig holds logging settings -type LoggingConfig struct { - Level string `toml:"level"` - File string `toml:"file"` -} - -// I18nConfig holds internationalization settings -type I18nConfig struct { - DefaultLanguage string `toml:"default_language"` - Language string `toml:"language"` - LocalesDir string `toml:"locales_dir"` -} - -// LoadConfig reads the config file and returns a Config struct -func LoadConfig(path string) (*Config, error) { - var config Config - if _, err := toml.DecodeFile(path, &config); err != nil { - return nil, fmt.Errorf("failed to decode 
config file: %w", err) - } - - if err := config.Validate(); err != nil { - return nil, fmt.Errorf("invalid configuration: %w", err) - } - - return &config, nil -} - -// Validate checks if the configuration is valid -func (c *Config) Validate() error { - if err := c.Trakt.Validate(); err != nil { - return fmt.Errorf("trakt config: %w", err) - } - - if err := c.Letterboxd.Validate(); err != nil { - return fmt.Errorf("letterboxd config: %w", err) - } - - if err := c.Export.Validate(); err != nil { - return fmt.Errorf("export config: %w", err) - } - - if err := c.Logging.Validate(); err != nil { - return fmt.Errorf("logging config: %w", err) - } - - if err := c.I18n.Validate(); err != nil { - return fmt.Errorf("i18n config: %w", err) - } - - return nil -} - -// Validate checks if the Trakt configuration is valid -func (c *TraktConfig) Validate() error { - if c.APIBaseURL == "" { - return fmt.Errorf("api_base_url is required") - } - return nil -} - -// Validate checks if the Letterboxd configuration is valid -func (c *LetterboxdConfig) Validate() error { - if c.ExportDir == "" { - return fmt.Errorf("export_dir is required") - } - return nil -} - -// Validate checks if the Export configuration is valid -func (c *ExportConfig) Validate() error { - if c.Format == "" { - return fmt.Errorf("format is required") - } - if c.DateFormat == "" { - return fmt.Errorf("date_format is required") - } - // If timezone is empty, we'll use UTC as default, so no error needed - return nil -} - -// Validate checks if the Logging configuration is valid -func (c *LoggingConfig) Validate() error { - if c.Level == "" { - return fmt.Errorf("level is required") - } - validLevels := map[string]bool{ - "debug": true, - "info": true, - "warn": true, - "error": true, - } - if !validLevels[c.Level] { - return fmt.Errorf("invalid log level: %s", c.Level) - } - return nil -} - -// Validate checks if the I18n configuration is valid -func (c *I18nConfig) Validate() error { - if c.DefaultLanguage == "" { - return fmt.Errorf("default_language is required") - } - if c.Language == "" { - return fmt.Errorf("language is required") - } - if c.LocalesDir == "" { - return fmt.Errorf("locales_dir is required") - } - return nil -} \ No newline at end of file diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go deleted file mode 100644 index 06660d1..0000000 --- a/pkg/config/config_test.go +++ /dev/null @@ -1,259 +0,0 @@ -package config - -import ( - "os" - "path/filepath" - "testing" -) - -func TestLoadConfig(t *testing.T) { - // Create a temporary directory for test files - tmpDir, err := os.MkdirTemp("", "config_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tmpDir) - - // Test cases - tests := []struct { - name string - configData string - expectError bool - validate func(*testing.T, *Config) - }{ - { - name: "valid config", - configData: ` -[trakt] -client_id = "test_client_id" -client_secret = "test_client_secret" -access_token = "test_access_token" -api_base_url = "https://api.trakt.tv" - -[letterboxd] -export_dir = "exports" - -[export] -format = "csv" -date_format = "2006-01-02" - -[logging] -level = "info" -file = "logs/export.log" - -[i18n] -default_language = "en" -language = "en" -locales_dir = "locales" -`, - expectError: false, - validate: func(t *testing.T, cfg *Config) { - // Validate Trakt config - if cfg.Trakt.ClientID != "test_client_id" { - t.Errorf("Expected ClientID 'test_client_id', got '%s'", cfg.Trakt.ClientID) - } - if cfg.Trakt.APIBaseURL != 
"https://api.trakt.tv" { - t.Errorf("Expected APIBaseURL 'https://api.trakt.tv', got '%s'", cfg.Trakt.APIBaseURL) - } - - // Validate Letterboxd config - if cfg.Letterboxd.ExportDir != "exports" { - t.Errorf("Expected ExportDir 'exports', got '%s'", cfg.Letterboxd.ExportDir) - } - - // Validate Export config - if cfg.Export.Format != "csv" { - t.Errorf("Expected Format 'csv', got '%s'", cfg.Export.Format) - } - if cfg.Export.DateFormat != "2006-01-02" { - t.Errorf("Expected DateFormat '2006-01-02', got '%s'", cfg.Export.DateFormat) - } - - // Validate Logging config - if cfg.Logging.Level != "info" { - t.Errorf("Expected Level 'info', got '%s'", cfg.Logging.Level) - } - if cfg.Logging.File != "logs/export.log" { - t.Errorf("Expected File 'logs/export.log', got '%s'", cfg.Logging.File) - } - - // Validate I18n config - if cfg.I18n.DefaultLanguage != "en" { - t.Errorf("Expected DefaultLanguage 'en', got '%s'", cfg.I18n.DefaultLanguage) - } - if cfg.I18n.Language != "en" { - t.Errorf("Expected Language 'en', got '%s'", cfg.I18n.Language) - } - if cfg.I18n.LocalesDir != "locales" { - t.Errorf("Expected LocalesDir 'locales', got '%s'", cfg.I18n.LocalesDir) - } - }, - }, - { - name: "missing required fields", - configData: ` -[trakt] -api_base_url = "https://api.trakt.tv" -`, - expectError: true, // Now we expect an error due to missing required fields - }, - { - name: "invalid TOML", - configData: "invalid = ] TOML", - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create a temporary config file - configPath := filepath.Join(tmpDir, "config.toml") - if err := os.WriteFile(configPath, []byte(tt.configData), 0644); err != nil { - t.Fatalf("Failed to write test config file: %v", err) - } - - // Load the config - cfg, err := LoadConfig(configPath) - - // Check error expectation - if tt.expectError && err == nil { - t.Error("Expected error but got none") - } else if !tt.expectError && err != nil { - t.Errorf("Unexpected error: %v", err) - } - - // Run validation if provided and no error occurred - if !tt.expectError && err == nil && tt.validate != nil { - tt.validate(t, cfg) - } - }) - } -} - -func TestLoadConfig_FileNotFound(t *testing.T) { - _, err := LoadConfig("nonexistent.toml") - if err == nil { - t.Error("Expected error when loading nonexistent file, got nil") - } -} - -func TestConfigValidation(t *testing.T) { - tests := []struct { - name string - config Config - expectError bool - errorMsg string - }{ - { - name: "missing trakt api base url", - config: Config{ - Trakt: TraktConfig{}, - Letterboxd: LetterboxdConfig{ - ExportDir: "exports", - }, - Export: ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - Logging: LoggingConfig{ - Level: "info", - }, - I18n: I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: "locales", - }, - }, - expectError: true, - errorMsg: "trakt config: api_base_url is required", - }, - { - name: "invalid log level", - config: Config{ - Trakt: TraktConfig{ - APIBaseURL: "https://api.trakt.tv", - }, - Letterboxd: LetterboxdConfig{ - ExportDir: "exports", - }, - Export: ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - Logging: LoggingConfig{ - Level: "invalid", - }, - I18n: I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: "locales", - }, - }, - expectError: true, - errorMsg: "logging config: invalid log level: invalid", - }, - { - name: "missing i18n language", - config: Config{ - Trakt: TraktConfig{ - APIBaseURL: "https://api.trakt.tv", - }, - 
Letterboxd: LetterboxdConfig{ - ExportDir: "exports", - }, - Export: ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - Logging: LoggingConfig{ - Level: "info", - }, - I18n: I18nConfig{ - DefaultLanguage: "en", - LocalesDir: "locales", - }, - }, - expectError: true, - errorMsg: "i18n config: language is required", - }, - { - name: "valid config", - config: Config{ - Trakt: TraktConfig{ - APIBaseURL: "https://api.trakt.tv", - }, - Letterboxd: LetterboxdConfig{ - ExportDir: "exports", - }, - Export: ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - Logging: LoggingConfig{ - Level: "info", - }, - I18n: I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: "locales", - }, - }, - expectError: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := tt.config.Validate() - if tt.expectError { - if err == nil { - t.Error("Expected error but got none") - } else if err.Error() != tt.errorMsg { - t.Errorf("Expected error message '%s', got '%s'", tt.errorMsg, err.Error()) - } - } else if err != nil { - t.Errorf("Unexpected error: %v", err) - } - }) - } -} \ No newline at end of file diff --git a/pkg/export/letterboxd.go b/pkg/export/letterboxd.go deleted file mode 100644 index a978092..0000000 --- a/pkg/export/letterboxd.go +++ /dev/null @@ -1,784 +0,0 @@ -package export - -import ( - "encoding/csv" - "fmt" - "os" - "path/filepath" - "sort" - "strconv" - "strings" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" -) - -// LetterboxdExporter handles the export of movies to Letterboxd format -type LetterboxdExporter struct { - config *config.Config - log logger.Logger -} - -// NewLetterboxdExporter creates a new Letterboxd exporter -func NewLetterboxdExporter(cfg *config.Config, log logger.Logger) *LetterboxdExporter { - return &LetterboxdExporter{ - config: cfg, - log: log, - } -} - -// getTimeInConfigTimezone returns the current time in the configured timezone -func (e *LetterboxdExporter) getTimeInConfigTimezone() time.Time { - now := time.Now().UTC() - - // If timezone is not set, use UTC - if e.config.Export.Timezone == "" { - e.log.Info("export.using_default_timezone", map[string]interface{}{ - "timezone": "UTC", - }) - return now - } - - // Try to load the configured timezone - loc, err := time.LoadLocation(e.config.Export.Timezone) - if err != nil { - e.log.Warn("export.timezone_load_failed", map[string]interface{}{ - "timezone": e.config.Export.Timezone, - "error": err.Error(), - }) - return now // Fall back to UTC on error - } - - // Return the time in the configured timezone - e.log.Info("export.using_configured_timezone", map[string]interface{}{ - "timezone": e.config.Export.Timezone, - "time": now.In(loc).Format(time.RFC3339), - }) - return now.In(loc) -} - -// getExportDir creates and returns the path to the directory where exports should be saved -func (e *LetterboxdExporter) getExportDir() (string, error) { - // Check if the export directory is already a temp/test directory - isTestDir := false - if e.config.Letterboxd.ExportDir != "" { - // Check if this seems to be a test directory - dirName := filepath.Base(e.config.Letterboxd.ExportDir) - if dirName == "letterboxd-test" || - dirName == "letterboxd_test" || - dirName == "export_test" || - dirName == "test" || - strings.Contains(dirName, "test") || - containsAny(e.config.Letterboxd.ExportDir, []string{ - "/tmp/", 
"/temp/", "/t/", - "/var/folders/", // macOS temp dir pattern - "Temp", "tmp", "temp"}) { - isTestDir = true - } - } - - // For test directories, use the directory as-is without creating subdirectories - if isTestDir { - // Ensure the directory exists - if err := os.MkdirAll(e.config.Letterboxd.ExportDir, 0755); err != nil { - e.log.Error("errors.export_dir_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": e.config.Letterboxd.ExportDir, - }) - return "", fmt.Errorf("failed to create export directory: %w", err) - } - - e.log.Info("export.using_test_directory", map[string]interface{}{ - "path": e.config.Letterboxd.ExportDir, - }) - - return e.config.Letterboxd.ExportDir, nil - } - - // For normal operation, create a subdirectory with date and time - now := e.getTimeInConfigTimezone() - dirName := fmt.Sprintf("export_%s_%s", - now.Format("2006-01-02"), - now.Format("15-04")) - - // Full path to the export directory - exportDir := filepath.Join(e.config.Letterboxd.ExportDir, dirName) - - // Create the directory - if err := os.MkdirAll(exportDir, 0755); err != nil { - e.log.Error("errors.export_dir_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": exportDir, - }) - return "", fmt.Errorf("failed to create export directory: %w", err) - } - - e.log.Info("export.using_directory", map[string]interface{}{ - "path": exportDir, - }) - - return exportDir, nil -} - -// Helper function to check if a string contains any of the substrings -func containsAny(s string, substrings []string) bool { - for _, substr := range substrings { - if strings.Contains(s, substr) { - return true - } - } - return false -} - -// ExportMovies exports the given movies to a CSV file in Letterboxd format -func (e *LetterboxdExporter) ExportMovies(movies []api.Movie) error { - // Get export directory - exportDir, err := e.getExportDir() - if err != nil { - return err - } - - // Check if we're in a test environment - isTestEnv := containsAny(exportDir, []string{"test", "tmp", "temp"}) - - // Use configured filename, or generate one with timestamp if not specified - var filename string - if e.config.Letterboxd.WatchedFilename != "" { - filename = e.config.Letterboxd.WatchedFilename - } else if isTestEnv { - // Use a fixed filename for tests to make it easier to locate - filename = "watched-export-test.csv" - } else { - // Use the configured timezone for filename timestamp - now := e.getTimeInConfigTimezone() - filename = fmt.Sprintf("letterboxd-export_%s_%s.csv", - now.Format("2006-01-02"), - now.Format("15-04")) - } - filePath := filepath.Join(exportDir, filename) - - // Create export file - file, err := os.Create(filePath) - if err != nil { - e.log.Error("errors.file_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": filePath, - }) - return fmt.Errorf("failed to create export file: %w", err) - } - defer file.Close() - - writer := csv.NewWriter(file) - defer writer.Flush() - - // Write header - header := []string{"Title", "Year", "WatchedDate", "Rating10", "imdbID", "tmdbID", "Rewatch"} - if err := writer.Write(header); err != nil { - return fmt.Errorf("failed to write header: %w", err) - } - - // Get ratings for movies - ratings, err := e.fetchRatings() - if err != nil { - e.log.Warn("export.ratings_fetch_failed", map[string]interface{}{ - "error": err.Error(), - }) - } - - // Create a map of movie ratings for quick lookup - movieRatings := make(map[string]string) - for _, rating := range ratings { - // Use IMDB ID as key for the ratings map - if 
rating.Movie.IDs.IMDB != "" { - // Convert to integer value (1-10) - movieRatings[rating.Movie.IDs.IMDB] = strconv.Itoa(int(rating.Rating)) - } - } - - // Sort movies by watched date (most recent first) - sortedMovies := make([]api.Movie, len(movies)) - copy(sortedMovies, movies) - - // Sort the movies slice by LastWatchedAt (newest to oldest) - sort.Slice(sortedMovies, func(i, j int) bool { - timeI, errI := time.Parse(time.RFC3339, sortedMovies[i].LastWatchedAt) - timeJ, errJ := time.Parse(time.RFC3339, sortedMovies[j].LastWatchedAt) - - // Handle parsing errors or empty dates - if errI != nil && errJ != nil { - return false // Both invalid, order doesn't matter - } - if errI != nil { - return false // i has invalid date, put at end - } - if errJ != nil { - return true // j has invalid date, i comes first - } - - // Return true if timeI is after timeJ (reverse chronological order) - return timeI.After(timeJ) - }) - - // Write movies - for _, movie := range sortedMovies { - // Parse watched date - watchedDate := "" - if movie.LastWatchedAt != "" { - if parsedTime, err := time.Parse(time.RFC3339, movie.LastWatchedAt); err == nil { - watchedDate = parsedTime.Format(e.config.Export.DateFormat) - } - } - - // Get rating for this movie - rating := "" - if r, exists := movieRatings[movie.Movie.IDs.IMDB]; exists { - rating = r - } - - // Determine if this is a rewatch - rewatch := "false" - if movie.Plays > 1 { - rewatch = "true" - } - - // Convert TMDB ID to string - tmdbID := strconv.Itoa(movie.Movie.IDs.TMDB) - - record := []string{ - movie.Movie.Title, - strconv.Itoa(movie.Movie.Year), - watchedDate, - rating, - movie.Movie.IDs.IMDB, - tmdbID, - rewatch, - } - - if err := writer.Write(record); err != nil { - return fmt.Errorf("failed to write movie record: %w", err) - } - } - - e.log.Info("export.export_complete", map[string]interface{}{ - "count": len(movies), - "path": filePath, - }) - return nil -} - -// ExportCollectionMovies exports the user's movie collection to a CSV file in Letterboxd format -func (e *LetterboxdExporter) ExportCollectionMovies(movies []api.CollectionMovie) error { - // Get export directory - exportDir, err := e.getExportDir() - if err != nil { - return err - } - - // Check if we're in a test environment - isTestEnv := containsAny(exportDir, []string{"test", "tmp", "temp"}) - - // Use configured filename, or generate one with timestamp if not specified - var filename string - if e.config.Letterboxd.CollectionFilename != "" { - filename = e.config.Letterboxd.CollectionFilename - } else if isTestEnv { - // Use a fixed filename for tests to make it easier to locate - filename = "collection-export-test.csv" - } else { - // Use the configured timezone for filename timestamp - now := e.getTimeInConfigTimezone() - filename = fmt.Sprintf("collection-export_%s_%s.csv", - now.Format("2006-01-02"), - now.Format("15-04")) - } - filePath := filepath.Join(exportDir, filename) - - file, err := os.Create(filePath) - if err != nil { - e.log.Error("errors.file_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": filePath, - }) - return fmt.Errorf("failed to create export file: %w", err) - } - defer file.Close() - - writer := csv.NewWriter(file) - defer writer.Flush() - - // Write header - header := []string{"Title", "Year", "CollectedDate", "imdbID", "tmdbID"} - if err := writer.Write(header); err != nil { - return fmt.Errorf("failed to write header: %w", err) - } - - // Write movies - for _, movie := range movies { - // Parse collected date - collectedDate := "" - 
if movie.CollectedAt != "" { - if parsedTime, err := time.Parse(time.RFC3339, movie.CollectedAt); err == nil { - collectedDate = parsedTime.Format(e.config.Export.DateFormat) - } - } - - // Convert TMDB ID to string - tmdbID := strconv.Itoa(movie.Movie.IDs.TMDB) - - record := []string{ - movie.Movie.Title, - strconv.Itoa(movie.Movie.Year), - collectedDate, - movie.Movie.IDs.IMDB, - tmdbID, - } - - if err := writer.Write(record); err != nil { - return fmt.Errorf("failed to write movie record: %w", err) - } - } - - e.log.Info("export.collection_export_complete", map[string]interface{}{ - "count": len(movies), - "path": filePath, - }) - return nil -} - -// ExportShows exports the user's watched shows to a CSV file -func (e *LetterboxdExporter) ExportShows(shows []api.WatchedShow) error { - // Get export directory - exportDir, err := e.getExportDir() - if err != nil { - return err - } - - // Use configured filename, or generate one with timestamp if not specified - var filename string - if e.config.Letterboxd.ShowsFilename != "" { - filename = e.config.Letterboxd.ShowsFilename - } else { - // Use the configured timezone for filename timestamp - now := e.getTimeInConfigTimezone() - filename = fmt.Sprintf("shows-export_%s_%s.csv", - now.Format("2006-01-02"), - now.Format("15-04")) - } - filePath := filepath.Join(exportDir, filename) - - file, err := os.Create(filePath) - if err != nil { - e.log.Error("errors.file_create_failed", map[string]interface{}{ - "error": err.Error(), - }) - return fmt.Errorf("failed to create export file: %w", err) - } - defer file.Close() - - writer := csv.NewWriter(file) - defer writer.Flush() - - // Write header - header := []string{"Title", "Year", "Season", "Episode", "EpisodeTitle", "LastWatched", "Rating10", "IMDb ID"} - if err := writer.Write(header); err != nil { - return fmt.Errorf("failed to write header: %w", err) - } - - // Check if episode titles are available - missingTitles := true - checkLimit := 0 - outerLoop: - for _, show := range shows { - for _, season := range show.Seasons { - for _, episode := range season.Episodes { - if episode.Title != "" { - missingTitles = false - break outerLoop - } - // Check only a reasonable number of episodes - checkLimit++ - if checkLimit > 20 { - break outerLoop - } - } - } - } - - if missingTitles { - e.log.Warn("export.episode_titles_missing", map[string]interface{}{ - "message": "Episode titles are missing. 
Check your Trakt API extended_info setting.", - }) - } - - // Fetch episode ratings - episodeRatings, err := e.fetchEpisodeRatings() - if err != nil { - e.log.Warn("export.episode_ratings_fetch_failed", map[string]interface{}{ - "error": err.Error(), - }) - } - - // Create a map of episode ratings for quick lookup - // Use a composite key of show_id:season:episode - episodeRatingMap := make(map[string]int) - for _, r := range episodeRatings { - if r.Show.IDs.Trakt > 0 && r.Episode.Season > 0 && r.Episode.Number > 0 { - key := fmt.Sprintf("%d:%d:%d", r.Show.IDs.Trakt, r.Episode.Season, r.Episode.Number) - episodeRatingMap[key] = int(r.Rating) - } - } - - // Fetch show ratings too - showRatings, err := e.fetchShowRatings() - if err != nil { - e.log.Warn("export.show_ratings_fetch_failed", map[string]interface{}{ - "error": err.Error(), - }) - } - - // Create a map of show ratings for quick lookup - showRatingMap := make(map[int]int) - for _, r := range showRatings { - if r.Show.IDs.Trakt > 0 { - showRatingMap[r.Show.IDs.Trakt] = int(r.Rating) - } - } - - // Write episodes - episodeCount := 0 - for _, show := range shows { - for _, season := range show.Seasons { - for _, episode := range season.Episodes { - // Parse watched date - watchedDate := "" - if show.LastWatchedAt != "" { - if parsedTime, err := time.Parse(time.RFC3339, show.LastWatchedAt); err == nil { - watchedDate = parsedTime.Format(e.config.Export.DateFormat) - } - } - - // Get rating for this episode - rating := "" - key := fmt.Sprintf("%d:%d:%d", show.Show.IDs.Trakt, season.Number, episode.Number) - if r, exists := episodeRatingMap[key]; exists { - rating = strconv.Itoa(r) - } else if r, exists := showRatingMap[show.Show.IDs.Trakt]; exists { - // If no episode rating, use show rating - rating = strconv.Itoa(r) - } - - record := []string{ - show.Show.Title, - strconv.Itoa(show.Show.Year), - strconv.Itoa(season.Number), - strconv.Itoa(episode.Number), - episode.Title, - watchedDate, - rating, - show.Show.IDs.IMDB, - } - - if err := writer.Write(record); err != nil { - return fmt.Errorf("failed to write episode record: %w", err) - } - episodeCount++ - } - } - } - - e.log.Info("export.shows_export_complete", map[string]interface{}{ - "shows": len(shows), - "episodes": episodeCount, - "path": filePath, - }) - return nil -} - -// ExportRatings exports the user's movie ratings to a CSV file in Letterboxd format -func (e *LetterboxdExporter) ExportRatings(ratings []api.Rating) error { - // Get export directory - exportDir, err := e.getExportDir() - if err != nil { - return err - } - - // Check if we're in a test environment - isTestEnv := containsAny(exportDir, []string{"test", "tmp", "temp"}) - - // Use configured filename, or generate one with timestamp if not specified - var filename string - if e.config.Letterboxd.RatingsFilename != "" { - filename = e.config.Letterboxd.RatingsFilename - } else if isTestEnv { - // Use a fixed filename for tests to make it easier to locate - filename = "ratings-export-test.csv" - } else { - // Use the configured timezone for filename timestamp - now := e.getTimeInConfigTimezone() - filename = fmt.Sprintf("ratings-export_%s_%s.csv", - now.Format("2006-01-02"), - now.Format("15-04")) - } - filePath := filepath.Join(exportDir, filename) - - file, err := os.Create(filePath) - if err != nil { - e.log.Error("errors.file_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": filePath, - }) - return fmt.Errorf("failed to create export file: %w", err) - } - defer file.Close() - - writer := 
csv.NewWriter(file) - defer writer.Flush() - - // Write header - Letterboxd format for ratings - header := []string{"Title", "Year", "Rating10", "RatedDate", "IMDb ID"} - if err := writer.Write(header); err != nil { - return fmt.Errorf("failed to write header: %w", err) - } - - // Write ratings - for _, r := range ratings { - // Parse rated date - ratedDate := "" - if r.RatedAt != "" { - if parsedTime, err := time.Parse(time.RFC3339, r.RatedAt); err == nil { - ratedDate = parsedTime.Format(e.config.Export.DateFormat) - } - } - - // Use integer rating directly (1-10) - ratingStr := "" - if r.Rating > 0 { - ratingStr = strconv.Itoa(int(r.Rating)) - } - - record := []string{ - r.Movie.Title, - strconv.Itoa(r.Movie.Year), - ratingStr, - ratedDate, - r.Movie.IDs.IMDB, - } - - if err := writer.Write(record); err != nil { - return fmt.Errorf("failed to write rating record: %w", err) - } - } - - e.log.Info("export.ratings_export_complete", map[string]interface{}{ - "count": len(ratings), - "path": filePath, - }) - return nil -} - -// ExportWatchlist exports the user's movie watchlist to a CSV file in Letterboxd format -func (e *LetterboxdExporter) ExportWatchlist(watchlist []api.WatchlistMovie) error { - // Get export directory - exportDir, err := e.getExportDir() - if err != nil { - return err - } - - // Check if we're in a test environment - isTestEnv := containsAny(exportDir, []string{"test", "tmp", "temp"}) - - // Use configured filename, or generate one with timestamp if not specified - var filename string - if e.config.Letterboxd.WatchlistFilename != "" { - filename = e.config.Letterboxd.WatchlistFilename - } else if isTestEnv { - // Use a fixed filename for tests to make it easier to locate - filename = "watchlist-export-test.csv" - } else { - // Use the configured timezone for filename timestamp - now := e.getTimeInConfigTimezone() - filename = fmt.Sprintf("watchlist-export_%s_%s.csv", - now.Format("2006-01-02"), - now.Format("15-04")) - } - filePath := filepath.Join(exportDir, filename) - - file, err := os.Create(filePath) - if err != nil { - e.log.Error("errors.file_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": filePath, - }) - return fmt.Errorf("failed to create export file: %w", err) - } - defer file.Close() - - writer := csv.NewWriter(file) - defer writer.Flush() - - // Write header - Letterboxd format for watchlist - header := []string{"Title", "Year", "ListedDate", "Rating10", "IMDb ID"} - if err := writer.Write(header); err != nil { - return fmt.Errorf("failed to write header: %w", err) - } - - // Write watchlist entries - for _, wl := range watchlist { - // Parse listed date - listedDate := "" - if wl.ListedAt != "" { - if parsedTime, err := time.Parse(time.RFC3339, wl.ListedAt); err == nil { - listedDate = parsedTime.Format(e.config.Export.DateFormat) - } - } - - record := []string{ - wl.Movie.Title, - strconv.Itoa(wl.Movie.Year), - listedDate, - wl.Notes, - wl.Movie.IDs.IMDB, - } - - if err := writer.Write(record); err != nil { - return fmt.Errorf("failed to write watchlist record: %w", err) - } - } - - e.log.Info("export.watchlist_export_complete", map[string]interface{}{ - "count": len(watchlist), - "path": filePath, - }) - return nil -} - -// ExportLetterboxdFormat exports the given movies to a CSV file in Letterboxd import format -// The format matches the official Letterboxd import format with columns: -// Title, Year, imdbID, tmdbID, WatchedDate, Rating10, Rewatch -func (e *LetterboxdExporter) ExportLetterboxdFormat(movies []api.Movie, ratings 
[]api.Rating) error { - // Get export directory - exportDir, err := e.getExportDir() - if err != nil { - return err - } - - // Check if we're in a test environment - isTestEnv := containsAny(exportDir, []string{"test", "tmp", "temp"}) - - // Use configured filename, or standard name - var filename string - if e.config.Letterboxd.LetterboxdImportFilename != "" { - filename = e.config.Letterboxd.LetterboxdImportFilename - } else if isTestEnv { - // Use a fixed filename for tests to make it easier to locate - filename = "letterboxd-import-test.csv" - } else { - filename = "letterboxd_import.csv" - } - filePath := filepath.Join(exportDir, filename) - - file, err := os.Create(filePath) - if err != nil { - e.log.Error("errors.file_create_failed", map[string]interface{}{ - "error": err.Error(), - "path": filePath, - }) - return fmt.Errorf("failed to create export file: %w", err) - } - defer file.Close() - - writer := csv.NewWriter(file) - defer writer.Flush() - - // Write header - header := []string{"Title", "Year", "imdbID", "tmdbID", "WatchedDate", "Rating10", "Rewatch"} - if err := writer.Write(header); err != nil { - return fmt.Errorf("failed to write header: %w", err) - } - - // Create a map of movie ratings for quick lookup - movieRatings := make(map[string]float64) - for _, rating := range ratings { - // Use IMDB ID as key for the ratings map - if rating.Movie.IDs.IMDB != "" { - movieRatings[rating.Movie.IDs.IMDB] = rating.Rating - } - } - - // Create a map to track plays for determining rewatches - moviePlays := make(map[string]int) - for _, movie := range movies { - if movie.Movie.IDs.IMDB != "" { - moviePlays[movie.Movie.IDs.IMDB] += movie.Plays - } - } - - // Write movies - for _, movie := range movies { - // Parse watched date - watchedDate := "" - if movie.LastWatchedAt != "" { - if parsedTime, err := time.Parse(time.RFC3339, movie.LastWatchedAt); err == nil { - watchedDate = parsedTime.Format(e.config.Export.DateFormat) - } - } - - // Get rating (scale is already 1-10 in Trakt) - rating := "" - if r, exists := movieRatings[movie.Movie.IDs.IMDB]; exists { - rating = strconv.FormatFloat(r, 'f', 0, 64) - } - - // Determine if this is a rewatch - rewatch := "false" - if movie.Plays > 1 { - rewatch = "true" - } - - // Convert TMDB ID to string - tmdbID := strconv.Itoa(movie.Movie.IDs.TMDB) - - record := []string{ - movie.Movie.Title, - strconv.Itoa(movie.Movie.Year), - movie.Movie.IDs.IMDB, - tmdbID, - watchedDate, - rating, - rewatch, - } - - if err := writer.Write(record); err != nil { - return fmt.Errorf("failed to write movie record: %w", err) - } - } - - e.log.Info("export.letterboxd_export_complete", map[string]interface{}{ - "count": len(movies), - "path": filePath, - }) - return nil -} - -// fetchRatings is a helper function to get movie ratings -func (e *LetterboxdExporter) fetchRatings() ([]api.Rating, error) { - // Create a new Trakt client with the same config - client := api.NewClient(e.config, e.log) - return client.GetRatings() -} - -// fetchShowRatings is a helper function to get show ratings -func (e *LetterboxdExporter) fetchShowRatings() ([]api.ShowRating, error) { - // Create a new Trakt client with the same config - client := api.NewClient(e.config, e.log) - return client.GetShowRatings() -} - -// fetchEpisodeRatings is a helper function to get episode ratings -func (e *LetterboxdExporter) fetchEpisodeRatings() ([]api.EpisodeRating, error) { - // Create a new Trakt client with the same config - client := api.NewClient(e.config, e.log) - return 
client.GetEpisodeRatings() -} \ No newline at end of file diff --git a/pkg/export/letterboxd_test.go b/pkg/export/letterboxd_test.go deleted file mode 100644 index 6127ad3..0000000 --- a/pkg/export/letterboxd_test.go +++ /dev/null @@ -1,813 +0,0 @@ -package export - -import ( - "encoding/csv" - "os" - "path/filepath" - "strconv" - "strings" - "testing" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/api" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// MockLogger implements the logger.Logger interface for testing -type MockLogger struct { - lastMessage string - lastData map[string]interface{} -} - -func (m *MockLogger) Info(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Infof(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) Error(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Errorf(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) Debug(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Debugf(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) Warn(messageID string, data ...map[string]interface{}) { - m.lastMessage = messageID - if len(data) > 0 { - m.lastData = data[0] - } -} - -func (m *MockLogger) Warnf(messageID string, data map[string]interface{}) { - m.lastMessage = messageID - m.lastData = data -} - -func (m *MockLogger) SetLogLevel(level string) { - // No-op for testing -} - -func (m *MockLogger) SetLogFile(filePath string) error { - // No-op for testing - return nil -} - -func (m *MockLogger) SetTranslator(t logger.Translator) { - // No-op for testing -} - -// TestNewLetterboxdExporter tests the creation of a new Letterboxd exporter -func TestNewLetterboxdExporter(t *testing.T) { - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: "test_exports", - }, - Export: config.ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - } - log := &MockLogger{} - - exporter := NewLetterboxdExporter(cfg, log) - if exporter == nil { - t.Error("Expected non-nil exporter") - return - } - - // Safely check if config is properly set - if exporter.config == nil { - t.Error("Expected config to be set, but got nil") - } else if exporter.config != cfg { - t.Error("Expected config to match the provided config") - } - - // Safely check if logger is properly set - if exporter.log == nil { - t.Error("Expected logger to be set, but got nil") - } -} - -// TestExportMovies tests the export of movies to a CSV file -func TestExportMovies(t *testing.T) { - // Create a temporary directory for test exports - tmpDir, err := os.MkdirTemp("", "letterboxd_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tmpDir) - - // Create test configuration - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: tmpDir, - }, - Export: config.ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - } - log 
:= &MockLogger{} - - // Create test movies - testMovies := []api.Movie{ - { - Movie: api.MovieInfo{ - Title: "Test Movie 1", - Year: 2020, - IDs: api.MovieIDs{ - IMDB: "tt1234567", - }, - }, - LastWatchedAt: time.Now().Format(time.RFC3339), - }, - { - Movie: api.MovieInfo{ - Title: "Test Movie 2", - Year: 2021, - IDs: api.MovieIDs{ - IMDB: "tt2345678", - }, - }, - LastWatchedAt: time.Now().Add(-24 * time.Hour).Format(time.RFC3339), - }, - } - - // Create exporter and export movies - exporter := NewLetterboxdExporter(cfg, log) - err = exporter.ExportMovies(testMovies) - if err != nil { - t.Fatalf("Failed to export movies: %v", err) - } - - // Check for the expected export file with fixed name - expectedFilePath := filepath.Join(tmpDir, "watched-export-test.csv") - if _, err := os.Stat(expectedFilePath); os.IsNotExist(err) { - t.Fatalf("Expected export file not found: %s", expectedFilePath) - } - - // Check file content - content, err := os.ReadFile(expectedFilePath) - if err != nil { - t.Fatalf("Failed to read export file: %v", err) - } - - // Verify file content - fileContent := string(content) - expectedHeaders := "Title,Year,WatchedDate,Rating10,imdbID,tmdbID,Rewatch" - if len(fileContent) == 0 || content[0] == 0 { - t.Error("Export file is empty") - } - if fileContent[:len(expectedHeaders)] != expectedHeaders { - t.Errorf("Expected headers '%s', got '%s'", expectedHeaders, fileContent[:len(expectedHeaders)]) - } - for _, movie := range testMovies { - if !strings.Contains(fileContent, movie.Movie.Title) { - t.Errorf("Export file does not contain movie title '%s'", movie.Movie.Title) - } - } -} - -// TestExportMoviesErrorHandling tests error handling in the export process -func TestExportMoviesErrorHandling(t *testing.T) { - // Test with invalid export directory - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: "/nonexistent/directory/that/should/not/exist", - }, - Export: config.ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - } - log := &MockLogger{} - - exporter := NewLetterboxdExporter(cfg, log) - err := exporter.ExportMovies([]api.Movie{}) - if err == nil { - t.Error("Expected error for invalid export directory, got nil") - } -} - -func TestExportCollectionMovies(t *testing.T) { - // Create a temporary directory for the test - tempDir, err := os.MkdirTemp("", "export_test") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create mock config and logger - mockConfig := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: tempDir, - }, - Export: config.ExportConfig{ - DateFormat: "2006-01-02", - }, - } - mockLogger := &MockLogger{} - - // Create test movies - testMovies := []api.CollectionMovie{ - { - Movie: api.MovieInfo{ - Title: "The Dark Knight", - Year: 2008, - IDs: api.MovieIDs{ - Trakt: 16, - IMDB: "tt0468569", - TMDB: 155, - Slug: "the-dark-knight-2008", - }, - }, - CollectedAt: "2023-01-15T23:40:30.000Z", - }, - { - Movie: api.MovieInfo{ - Title: "Inception", - Year: 2010, - IDs: api.MovieIDs{ - Trakt: 417, - IMDB: "tt1375666", - TMDB: 27205, - Slug: "inception-2010", - }, - }, - CollectedAt: "2023-03-20T18:25:43.000Z", - }, - } - - // Create exporter and export movies - exporter := NewLetterboxdExporter(mockConfig, mockLogger) - err = exporter.ExportCollectionMovies(testMovies) - - // Assert no error - assert.NoError(t, err) - - // Check for the expected export file with fixed name - expectedFilePath := filepath.Join(tempDir, "collection-export-test.csv") - 
assert.FileExists(t, expectedFilePath, "Export file should exist") - - // Read the CSV file - file, err := os.Open(expectedFilePath) - assert.NoError(t, err) - defer file.Close() - - reader := csv.NewReader(file) - records, err := reader.ReadAll() - assert.NoError(t, err) - - // Check the header - assert.Equal(t, []string{"Title", "Year", "CollectedDate", "imdbID", "tmdbID"}, records[0]) - - // Check movie records - assert.Equal(t, "The Dark Knight", records[1][0]) - assert.Equal(t, "2008", records[1][1]) - assert.Equal(t, "2023-01-15", records[1][2]) - assert.Equal(t, "tt0468569", records[1][3]) - assert.Equal(t, "155", records[1][4]) - - assert.Equal(t, "Inception", records[2][0]) - assert.Equal(t, "2010", records[2][1]) - assert.Equal(t, "2023-03-20", records[2][2]) - assert.Equal(t, "tt1375666", records[2][3]) - assert.Equal(t, "27205", records[2][4]) -} - -func TestExportShows(t *testing.T) { - // Create a temporary directory for test files - tempDir, err := os.MkdirTemp("", "letterboxd-test") - require.NoError(t, err) - defer os.RemoveAll(tempDir) - - // Create a test config - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: tempDir, - ShowsFilename: "test-shows-export.csv", - }, - Export: config.ExportConfig{ - DateFormat: "2006-01-02", - }, - } - - // Create a test logger - log := &MockLogger{} - - // Create an exporter with the test config - exporter := NewLetterboxdExporter(cfg, log) - - // Create some test data - testShow := api.WatchedShow{ - Show: api.ShowInfo{ - Title: "Game of Thrones", - Year: 2011, - IDs: api.ShowIDs{ - IMDB: "tt0944947", - }, - }, - Seasons: []api.ShowSeason{ - { - Number: 1, - Episodes: []api.EpisodeInfo{ - { - Number: 1, - Title: "Winter Is Coming", - IDs: api.EpisodeIDs{ - Trakt: 73640, - TVDB: 3254641, - }, - }, - { - Number: 2, - Title: "The Kingsroad", - IDs: api.EpisodeIDs{ - Trakt: 73641, - TVDB: 3254651, - }, - }, - }, - }, - { - Number: 2, - Episodes: []api.EpisodeInfo{ - { - Number: 1, - Title: "The North Remembers", - IDs: api.EpisodeIDs{ - Trakt: 73642, - TVDB: 4077553, - }, - }, - }, - }, - }, - LastWatchedAt: "2022-01-01T12:00:00Z", - } - - shows := []api.WatchedShow{testShow} - - // Test the export function - err = exporter.ExportShows(shows) - require.NoError(t, err) - - // Verify the file exists - filePath := filepath.Join(tempDir, "test-shows-export.csv") - _, err = os.Stat(filePath) - require.NoError(t, err) - - // Read the file content - file, err := os.Open(filePath) - require.NoError(t, err) - defer file.Close() - - reader := csv.NewReader(file) - lines, err := reader.ReadAll() - require.NoError(t, err) - - // Verify header - require.Equal(t, []string{"Title", "Year", "Season", "Episode", "EpisodeTitle", "LastWatched", "Rating10", "IMDb ID"}, lines[0]) - - // Verify content - require.Len(t, lines, 4) // header + 3 episodes - require.Equal(t, "Game of Thrones", lines[1][0]) - require.Equal(t, "2011", lines[1][1]) - require.Equal(t, "1", lines[1][2]) - require.Equal(t, "1", lines[1][3]) - require.Equal(t, "Winter Is Coming", lines[1][4]) - require.Equal(t, "2022-01-01", lines[1][5]) - - require.Equal(t, "Game of Thrones", lines[2][0]) - require.Equal(t, "1", lines[2][2]) - require.Equal(t, "2", lines[2][3]) - require.Equal(t, "The Kingsroad", lines[2][4]) - - require.Equal(t, "Game of Thrones", lines[3][0]) - require.Equal(t, "2", lines[3][2]) - require.Equal(t, "1", lines[3][3]) - require.Equal(t, "The North Remembers", lines[3][4]) -} - -// TestExportRatings tests the export of movie ratings to a CSV file 
-func TestExportRatings(t *testing.T) { - // Create a temporary directory for test exports - tmpDir, err := os.MkdirTemp("", "ratings_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tmpDir) - - // Create test configuration - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: tmpDir, - }, - Export: config.ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - } - log := &MockLogger{} - - // Create test ratings - testRatings := []api.Rating{ - { - Movie: api.MovieInfo{ - Title: "Test Movie 1", - Year: 2020, - IDs: api.MovieIDs{ - IMDB: "tt1234567", - }, - }, - Rating: 8.5, - RatedAt: time.Now().Format(time.RFC3339), - }, - { - Movie: api.MovieInfo{ - Title: "Test Movie 2", - Year: 2021, - IDs: api.MovieIDs{ - IMDB: "tt2345678", - }, - }, - Rating: 7.0, - RatedAt: time.Now().Add(-24 * time.Hour).Format(time.RFC3339), - }, - } - - // Create exporter and export ratings - exporter := NewLetterboxdExporter(cfg, log) - - // Update the ExportRatings method in letterboxd.go to use a fixed filename for tests - // before running this test - err = exporter.ExportRatings(testRatings) - if err != nil { - t.Fatalf("Failed to export ratings: %v", err) - } - - // Look for the exported file - files, err := filepath.Glob(filepath.Join(tmpDir, "ratings-export-*.csv")) - if err != nil { - t.Fatalf("Failed to find export files: %v", err) - } - if len(files) == 0 { - t.Fatal("No ratings export file found") - } - - // Read the first found file - content, err := os.ReadFile(files[0]) - if err != nil { - t.Fatalf("Failed to read export file: %v", err) - } - - // Verify file content - fileContent := string(content) - expectedHeaders := "Title,Year,Rating10,RatedDate,IMDb ID" - if len(fileContent) == 0 || content[0] == 0 { - t.Error("Export file is empty") - } - if fileContent[:len(expectedHeaders)] != expectedHeaders { - t.Errorf("Expected headers '%s', got '%s'", expectedHeaders, fileContent[:len(expectedHeaders)]) - } - - // Check that all test ratings' movies are in the file - for _, rating := range testRatings { - if !strings.Contains(fileContent, rating.Movie.Title) { - t.Errorf("Export file does not contain movie title '%s'", rating.Movie.Title) - } - - // Verify rating value is present (as a string) - ratingStr := strconv.Itoa(int(rating.Rating)) - if !strings.Contains(fileContent, ratingStr) { - t.Errorf("Export file does not contain rating '%s'", ratingStr) - } - } -} - -// TestExportWatchlist tests the export of movie watchlist to a CSV file -func TestExportWatchlist(t *testing.T) { - // Create a temporary directory for test exports - tmpDir, err := os.MkdirTemp("", "watchlist_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tmpDir) - - // Create test configuration - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: tmpDir, - }, - Export: config.ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - } - log := &MockLogger{} - - // Create test watchlist - testWatchlist := []api.WatchlistMovie{ - { - Movie: api.MovieInfo{ - Title: "Future Movie 1", - Year: 2022, - IDs: api.MovieIDs{ - IMDB: "tt1234567", - }, - }, - ListedAt: time.Now().Format(time.RFC3339), - Notes: "Must watch", - }, - { - Movie: api.MovieInfo{ - Title: "Future Movie 2", - Year: 2023, - IDs: api.MovieIDs{ - IMDB: "tt2345678", - }, - }, - ListedAt: time.Now().Add(-24 * time.Hour).Format(time.RFC3339), - Notes: "Looks interesting", - }, - } - - // Create exporter 
and export watchlist - exporter := NewLetterboxdExporter(cfg, log) - err = exporter.ExportWatchlist(testWatchlist) - if err != nil { - t.Fatalf("Failed to export watchlist: %v", err) - } - - // Check for the expected export file with fixed name - expectedFilePath := filepath.Join(tmpDir, "watchlist-export-test.csv") - if _, err := os.Stat(expectedFilePath); os.IsNotExist(err) { - t.Fatalf("Expected export file not found: %s", expectedFilePath) - } - - // Check file content - content, err := os.ReadFile(expectedFilePath) - if err != nil { - t.Fatalf("Failed to read export file: %v", err) - } - - // Verify file content - fileContent := string(content) - expectedHeaders := "Title,Year,ListedDate,Rating10,IMDb ID" - if len(fileContent) == 0 || content[0] == 0 { - t.Error("Export file is empty") - } - if fileContent[:len(expectedHeaders)] != expectedHeaders { - t.Errorf("Expected headers '%s', got '%s'", expectedHeaders, fileContent[:len(expectedHeaders)]) - } - - // Check that all watchlist movies are in the file - for _, item := range testWatchlist { - if !strings.Contains(fileContent, item.Movie.Title) { - t.Errorf("Export file does not contain movie title '%s'", item.Movie.Title) - } - - // Check notes if present - if item.Notes != "" && !strings.Contains(fileContent, item.Notes) { - t.Errorf("Export file does not contain notes '%s'", item.Notes) - } - } -} - -// TestExportLetterboxdFormat tests the export to Letterboxd import format -func TestExportLetterboxdFormat(t *testing.T) { - // Create a temporary directory for test exports - tmpDir, err := os.MkdirTemp("", "letterboxd_import_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tmpDir) - - // Create test configuration - cfg := &config.Config{ - Letterboxd: config.LetterboxdConfig{ - ExportDir: tmpDir, - }, - Export: config.ExportConfig{ - Format: "csv", - DateFormat: "2006-01-02", - }, - } - log := &MockLogger{} - - // Create test movies - testMovies := []api.Movie{ - { - Movie: api.MovieInfo{ - Title: "Test Movie 1", - Year: 2020, - IDs: api.MovieIDs{ - IMDB: "tt1234567", - TMDB: 1234, - }, - }, - LastWatchedAt: time.Now().Format(time.RFC3339), - Plays: 2, - }, - { - Movie: api.MovieInfo{ - Title: "Test Movie 2", - Year: 2021, - IDs: api.MovieIDs{ - IMDB: "tt2345678", - TMDB: 5678, - }, - }, - LastWatchedAt: time.Now().Add(-24 * time.Hour).Format(time.RFC3339), - Plays: 1, - }, - } - - // Create test ratings - testRatings := []api.Rating{ - { - Movie: api.MovieInfo{ - Title: "Test Movie 1", - Year: 2020, - IDs: api.MovieIDs{ - IMDB: "tt1234567", - }, - }, - Rating: 8, - RatedAt: time.Now().Format(time.RFC3339), - }, - } - - // Create exporter and export to Letterboxd format - exporter := NewLetterboxdExporter(cfg, log) - err = exporter.ExportLetterboxdFormat(testMovies, testRatings) - if err != nil { - t.Fatalf("Failed to export in Letterboxd format: %v", err) - } - - // Check for the expected export file with fixed name - expectedFilePath := filepath.Join(tmpDir, "letterboxd-import-test.csv") - if _, err := os.Stat(expectedFilePath); os.IsNotExist(err) { - t.Fatalf("Expected export file not found: %s", expectedFilePath) - } - - // Check file content - content, err := os.ReadFile(expectedFilePath) - if err != nil { - t.Fatalf("Failed to read export file: %v", err) - } - - // Verify file content - fileContent := string(content) - expectedHeaders := "Title,Year,imdbID,tmdbID,WatchedDate,Rating10,Rewatch" - if len(fileContent) == 0 || content[0] == 0 { - t.Error("Export file is 
empty") - } - if fileContent[:len(expectedHeaders)] != expectedHeaders { - t.Errorf("Expected headers '%s', got '%s'", expectedHeaders, fileContent[:len(expectedHeaders)]) - } - - // Check that all movies are in the file - for _, movie := range testMovies { - if !strings.Contains(fileContent, movie.Movie.Title) { - t.Errorf("Export file does not contain movie title '%s'", movie.Movie.Title) - } - - // Check IMDB ID is included - if !strings.Contains(fileContent, movie.Movie.IDs.IMDB) { - t.Errorf("Export file does not contain IMDB ID '%s'", movie.Movie.IDs.IMDB) - } - - // Check TMDB ID is included - tmdbID := strconv.Itoa(movie.Movie.IDs.TMDB) - if !strings.Contains(fileContent, tmdbID) { - t.Errorf("Export file does not contain TMDB ID '%s'", tmdbID) - } - } - - // Check rating is included - if !strings.Contains(fileContent, "8") { - t.Error("Export file does not contain the expected rating") - } - - // Check rewatch indicator for movie with multiple plays - if !strings.Contains(fileContent, "true") { - t.Error("Export file does not indicate rewatch properly") - } -} - -// TestGetTimeInConfigTimezone tests the getTimeInConfigTimezone function -func TestGetTimeInConfigTimezone(t *testing.T) { - // Test cases - testCases := []struct { - name string - timezone string - isValid bool - }{ - { - name: "Default timezone (empty)", - timezone: "", - isValid: true, - }, - { - name: "Valid timezone", - timezone: "America/New_York", - isValid: true, - }, - { - name: "Invalid timezone", - timezone: "Invalid/Timezone", - isValid: false, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - // Create mock logger and config - mockLogger := &MockLogger{} - cfg := &config.Config{ - Export: config.ExportConfig{ - Timezone: tc.timezone, - }, - } - - // Create exporter - exporter := NewLetterboxdExporter(cfg, mockLogger) - - // Get time in configured timezone - result := exporter.getTimeInConfigTimezone() - - // Check the result - if tc.timezone == "" { - // For empty timezone, should use UTC - assert.Equal(t, "export.using_default_timezone", mockLogger.lastMessage) - } else if tc.isValid { - // For valid timezone, should use the configured timezone - assert.Equal(t, "export.using_configured_timezone", mockLogger.lastMessage) - - // Verify the timezone data is in the log - if data, ok := mockLogger.lastData["timezone"]; ok { - assert.Equal(t, tc.timezone, data) - } else { - t.Errorf("Expected timezone in log data") - } - - // Verify the time is formatted correctly - if timeStr, ok := mockLogger.lastData["time"]; ok { - _, err := time.Parse(time.RFC3339, timeStr.(string)) - assert.NoError(t, err, "Time should be in RFC3339 format") - } else { - t.Errorf("Expected time in log data") - } - } else { - // For invalid timezone, should log a warning and return UTC time - assert.Equal(t, "export.timezone_load_failed", mockLogger.lastMessage) - - // Verify the error message contains the timezone - if data, ok := mockLogger.lastData["timezone"]; ok { - assert.Equal(t, tc.timezone, data) - } else { - t.Errorf("Expected timezone in log data") - } - - // Verify there's an error message - assert.Contains(t, mockLogger.lastData, "error") - } - - // Verify the result is a valid time - nowUTC := time.Now().UTC() - timeDiff := result.Sub(nowUTC) - - // The time difference should be small (within a few seconds) - // or match the timezone offset if using a valid timezone - assert.True(t, timeDiff.Seconds() < 5, "Time difference should be small") - }) - } -} \ No newline at end of file diff --git 
a/pkg/i18n/i18n.go b/pkg/i18n/i18n.go deleted file mode 100644 index d28bb32..0000000 --- a/pkg/i18n/i18n.go +++ /dev/null @@ -1,134 +0,0 @@ -package i18n - -import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" - "github.com/nicksnyder/go-i18n/v2/i18n" - "golang.org/x/text/language" -) - -// Translator handles all internationalization operations -type Translator struct { - bundle *i18n.Bundle - config *config.I18nConfig - log logger.Logger - localizer *i18n.Localizer -} - -// NewTranslator creates a new translator instance -func NewTranslator(cfg *config.I18nConfig, log logger.Logger) (*Translator, error) { - bundle := i18n.NewBundle(language.English) - bundle.RegisterUnmarshalFunc("json", json.Unmarshal) - - t := &Translator{ - bundle: bundle, - config: cfg, - log: log, - } - - if err := t.loadTranslations(); err != nil { - return nil, err - } - - t.localizer = i18n.NewLocalizer(bundle, cfg.Language, cfg.DefaultLanguage) - return t, nil -} - -// loadTranslations loads all translation files from the locales directory -func (t *Translator) loadTranslations() error { - t.log.Debug("i18n.loading_translations", map[string]interface{}{ - "dir": t.config.LocalesDir, - }) - - entries, err := os.ReadDir(t.config.LocalesDir) - if err != nil { - return fmt.Errorf("failed to read locales directory: %w", err) - } - - t.log.Debug("i18n.found_files", map[string]interface{}{ - "count": len(entries), - }) - - for _, entry := range entries { - if entry.IsDir() { - continue - } - - if filepath.Ext(entry.Name()) != ".json" { - continue - } - - path := filepath.Join(t.config.LocalesDir, entry.Name()) - if _, err := t.bundle.LoadMessageFile(path); err != nil { - t.log.Warn("errors.translation_file_load_failed", map[string]interface{}{ - "path": path, - "error": err.Error(), - }) - continue - } - - t.log.Debug("i18n.translation_file_loaded", map[string]interface{}{ - "path": path, - }) - } - - return nil -} - -// Translate returns the translated message for the given message ID -func (t *Translator) Translate(messageID string, templateData map[string]interface{}) string { - // Simple protection against recursion - if messageID == "" { - return "" - } - - // Prevent recursion for error messages that might be logged during translation - if messageID == "errors.translation_failed" || - messageID == "errors.translation_file_load_failed" || - messageID == "errors.translation_not_found" { - return messageID - } - - // Create a message to translate - msg := i18n.Message{ - ID: messageID, - } - - // Attempt translation - translation, err := t.localizer.Localize(&i18n.LocalizeConfig{ - DefaultMessage: &msg, - TemplateData: templateData, - }) - - if err != nil { - // Log a warning for missing translations - t.log.Warn("errors.translation_not_found", map[string]interface{}{ - "messageID": messageID, - }) - // Return the original ID - return messageID - } - - // If translation equals messageID, it means no translation was found - if translation == messageID { - t.log.Warn("errors.translation_not_found", map[string]interface{}{ - "messageID": messageID, - }) - } - - return translation -} - -// SetLanguage changes the current language -func (t *Translator) SetLanguage(lang string) { - t.localizer = i18n.NewLocalizer(t.bundle, lang, t.config.DefaultLanguage) - t.config.Language = lang - t.log.Info("i18n.language_changed", map[string]interface{}{ - "language": lang, - }) -} \ No newline at end of 
file diff --git a/pkg/i18n/i18n_test.go b/pkg/i18n/i18n_test.go deleted file mode 100644 index 6e31b51..0000000 --- a/pkg/i18n/i18n_test.go +++ /dev/null @@ -1,280 +0,0 @@ -package i18n - -import ( - "os" - "path/filepath" - "testing" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" -) - -// MockLogger implements the Logger interface for testing -type MockLogger struct { - infoMessages []string - errorMessages []string - warnMessages []string - debugMessages []string -} - -func NewMockLogger() *MockLogger { - return &MockLogger{ - infoMessages: []string{}, - errorMessages: []string{}, - warnMessages: []string{}, - debugMessages: []string{}, - } -} - -func (m *MockLogger) Info(messageID string, data ...map[string]interface{}) { - m.infoMessages = append(m.infoMessages, messageID) -} -func (m *MockLogger) Error(messageID string, data ...map[string]interface{}) { - m.errorMessages = append(m.errorMessages, messageID) -} -func (m *MockLogger) Warn(messageID string, data ...map[string]interface{}) { - m.warnMessages = append(m.warnMessages, messageID) -} -func (m *MockLogger) Debug(messageID string, data ...map[string]interface{}) { - m.debugMessages = append(m.debugMessages, messageID) -} -func (m *MockLogger) Infof(format string, data map[string]interface{}) {} -func (m *MockLogger) Errorf(format string, data map[string]interface{}) {} -func (m *MockLogger) Warnf(format string, data map[string]interface{}) {} -func (m *MockLogger) Debugf(format string, data map[string]interface{}) {} -func (m *MockLogger) SetLogLevel(level string) {} -func (m *MockLogger) SetLogFile(filePath string) error { return nil } -func (m *MockLogger) SetTranslator(t logger.Translator) {} - -func TestNewTranslator(t *testing.T) { - // Create a temporary directory for test translation files - tempDir, err := os.MkdirTemp("", "i18n_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create a basic English translation file - enContent := `{ - "test": { - "message": "This is a test message" - } - }` - err = os.WriteFile(filepath.Join(tempDir, "en.json"), []byte(enContent), 0644) - if err != nil { - t.Fatalf("Failed to create test translation file: %v", err) - } - - // Create a basic French translation file - frContent := `{ - "test": { - "message": "C'est un message de test" - } - }` - err = os.WriteFile(filepath.Join(tempDir, "fr.json"), []byte(frContent), 0644) - if err != nil { - t.Fatalf("Failed to create test translation file: %v", err) - } - - tests := []struct { - name string - config config.I18nConfig - expectError bool - expectedLang string - fallbackLang string - expectLogEntry bool - }{ - { - name: "valid configuration", - config: config.I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: tempDir, - }, - expectError: false, - expectedLang: "en", - fallbackLang: "en", - expectLogEntry: true, - }, - { - name: "invalid locale directory", - config: config.I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: "/nonexistent/dir", - }, - expectError: true, - expectLogEntry: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - mockLog := NewMockLogger() - translator, err := NewTranslator(&tt.config, mockLog) - - if tt.expectError { - if err == nil { - t.Error("Expected error but got none") - } - return - } - - if err != nil { - t.Fatalf("Unexpected error: %v", err) - } - - if translator == nil { - 
t.Fatal("Expected non-nil translator") - } - - if translator.config.Language != tt.expectedLang { - t.Errorf("Expected language %s, got %s", tt.expectedLang, translator.config.Language) - } - - if translator.config.DefaultLanguage != tt.fallbackLang { - t.Errorf("Expected default language %s, got %s", tt.fallbackLang, translator.config.DefaultLanguage) - } - - if len(mockLog.debugMessages) == 0 && tt.expectLogEntry { - t.Error("Expected debug log entries for loaded translation files") - } - }) - } -} - -func TestTranslate(t *testing.T) { - // Create a temporary directory for test translation files - tempDir, err := os.MkdirTemp("", "i18n_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create a basic English translation file - enContent := `{ - "test.message": "This is a test message", - "test.with_data": "Hello, {{.name}}!" - }` - err = os.WriteFile(filepath.Join(tempDir, "en.json"), []byte(enContent), 0644) - if err != nil { - t.Fatalf("Failed to create test translation file: %v", err) - } - - mockLog := NewMockLogger() - translator, err := NewTranslator(&config.I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: tempDir, - }, mockLog) - - if err != nil { - t.Fatalf("Failed to create translator: %v", err) - } - - tests := []struct { - name string - messageID string - templateData map[string]interface{} - expected string - }{ - { - name: "existing message", - messageID: "test.message", - templateData: nil, - expected: "This is a test message", - }, - { - name: "message with template data", - messageID: "test.with_data", - templateData: map[string]interface{}{ - "name": "John", - }, - expected: "Hello, John!", - }, - { - name: "non-existent message", - messageID: "test.nonexistent", - templateData: nil, - expected: "test.nonexistent", // Falls back to message ID - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := translator.Translate(tt.messageID, tt.templateData) - - if result != tt.expected { - t.Errorf("Expected translation '%s', got '%s'", tt.expected, result) - } - - // Check if warn log was created for non-existent messages - if tt.name == "non-existent message" && len(mockLog.warnMessages) == 0 { - t.Error("Expected warning log for non-existent message") - } - }) - } -} - -func TestSetLanguage(t *testing.T) { - // Create a temporary directory for test translation files - tempDir, err := os.MkdirTemp("", "i18n_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create a basic English translation file - enContent := `{ - "test.message": "This is a test message" - }` - err = os.WriteFile(filepath.Join(tempDir, "en.json"), []byte(enContent), 0644) - if err != nil { - t.Fatalf("Failed to create test translation file: %v", err) - } - - // Create a basic French translation file - frContent := `{ - "test.message": "C'est un message de test" - }` - err = os.WriteFile(filepath.Join(tempDir, "fr.json"), []byte(frContent), 0644) - if err != nil { - t.Fatalf("Failed to create test translation file: %v", err) - } - - mockLog := NewMockLogger() - translator, err := NewTranslator(&config.I18nConfig{ - DefaultLanguage: "en", - Language: "en", - LocalesDir: tempDir, - }, mockLog) - - if err != nil { - t.Fatalf("Failed to create translator: %v", err) - } - - // Test initial language - result := translator.Translate("test.message", nil) - if result != "This is a test message" { - t.Errorf("Expected English 
message, got: %s", result) - } - - // Change language to French - translator.SetLanguage("fr") - - // Check config was updated - if translator.config.Language != "fr" { - t.Errorf("Expected language to be set to 'fr', got '%s'", translator.config.Language) - } - - // Check log message was created - if len(mockLog.infoMessages) == 0 { - t.Error("Expected info log for language change") - } - - // Test French translation - result = translator.Translate("test.message", nil) - if result != "C'est un message de test" { - t.Errorf("Expected French message, got: %s", result) - } -} \ No newline at end of file diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go deleted file mode 100644 index 790bdc9..0000000 --- a/pkg/logger/logger.go +++ /dev/null @@ -1,282 +0,0 @@ -package logger - -import ( - "fmt" - "io" - "os" - "strings" - - "github.com/sirupsen/logrus" -) - -// Translator interface for i18n support -type Translator interface { - Translate(messageID string, templateData map[string]interface{}) string -} - -// Logger interface defines the logging methods -type Logger interface { - Info(messageID string, data ...map[string]interface{}) - Infof(messageID string, data map[string]interface{}) - Error(messageID string, data ...map[string]interface{}) - Errorf(messageID string, data map[string]interface{}) - Warn(messageID string, data ...map[string]interface{}) - Warnf(messageID string, data map[string]interface{}) - Debug(messageID string, data ...map[string]interface{}) - Debugf(messageID string, data map[string]interface{}) - SetLogLevel(level string) - SetLogFile(path string) error - SetTranslator(t Translator) -} - -// VisualFormatter provides a more readable, visual log format -type VisualFormatter struct { - isQuietMode bool -} - -// Format implements logrus.Formatter interface -func (f *VisualFormatter) Format(entry *logrus.Entry) ([]byte, error) { - timestamp := entry.Time.Format("15:04:05") - - var icon, levelStr string - switch entry.Level { - case logrus.ErrorLevel: - icon = "❌" - levelStr = "ERROR" - case logrus.WarnLevel: - icon = "⚠️ " - levelStr = "WARN " - case logrus.InfoLevel: - // Special icons for specific messages - if strings.Contains(entry.Message, "Successfully exported") { - icon = "✅" - } else if strings.Contains(entry.Message, "Retrieved") && strings.Contains(entry.Message, "movies") { - icon = "📥" - } else if strings.Contains(entry.Message, "Scheduler") { - icon = "⏰" - } else if strings.Contains(entry.Message, "Starting") || strings.Contains(entry.Message, "Initializing") { - icon = "🚀" - } else { - icon = "ℹ️ " - } - levelStr = "INFO " - case logrus.DebugLevel: - icon = "🔧" - levelStr = "DEBUG" - default: - icon = "📝" - levelStr = "LOG " - } - - // In quiet mode, format important messages more prominently - if f.isQuietMode { - if strings.Contains(entry.Message, "Successfully exported") { - return []byte(fmt.Sprintf("\n%s %s SUCCESS: %s\n", icon, timestamp, entry.Message)), nil - } else if strings.Contains(entry.Message, "Retrieved") && strings.Contains(entry.Message, "movies") { - return []byte(fmt.Sprintf("%s %s DATA: %s\n", icon, timestamp, entry.Message)), nil - } else if strings.Contains(entry.Message, "Scheduler is running") { - return []byte(fmt.Sprintf("%s %s STATUS: %s\n\n", icon, timestamp, entry.Message)), nil - } - } - - // Standard format - return []byte(fmt.Sprintf("%s %s [%s] %s\n", icon, timestamp, levelStr, entry.Message)), nil -} - -// DualWriter writes to both file and stdout, with filtering for stdout -type DualWriter struct { - fileWriter 
io.Writer - stdoutWriter io.Writer - quietMode bool -} - -// Write implements io.Writer interface -func (dw *DualWriter) Write(p []byte) (n int, err error) { - // Always write to file - if dw.fileWriter != nil { - dw.fileWriter.Write(p) - } - - // Filter stdout output in quiet mode - if dw.quietMode { - message := string(p) - // Only show important messages in quiet mode - if strings.Contains(message, "Successfully exported") || - strings.Contains(message, "Scheduler is running") || - strings.Contains(message, "Retrieved") || - strings.Contains(message, "❌") || // Error icon - strings.Contains(message, "level=error") || - strings.Contains(message, "level=fatal") { - return dw.stdoutWriter.Write(p) - } - // Don't write to stdout for filtered messages - return len(p), nil - } - - // In non-quiet mode, write to stdout - return dw.stdoutWriter.Write(p) -} - -// StandardLogger wraps logrus.Logger -type StandardLogger struct { - *logrus.Logger - translator Translator - fileWriter *os.File - isQuietMode bool -} - -// NewLogger creates a new logger instance -func NewLogger() Logger { - log := logrus.New() - log.SetOutput(os.Stdout) - - // Check if quiet mode is enabled - quietMode := os.Getenv("EXPORT_QUIET_MODE") == "true" - - // Set visual formatter - log.SetFormatter(&VisualFormatter{ - isQuietMode: quietMode, - }) - log.SetLevel(logrus.InfoLevel) - - return &StandardLogger{ - Logger: log, - isQuietMode: quietMode, - } -} - -// SetTranslator sets the translator for the logger -func (l *StandardLogger) SetTranslator(t Translator) { - l.translator = t -} - -// translate handles message translation if a translator is available -func (l *StandardLogger) translate(messageID string, data map[string]interface{}) string { - // No translation if no translator - if l.translator == nil { - return messageID - } - - // Prevent recursion from specific error types - if messageID == "" || messageID == "errors.translation_failed" { - return messageID - } - - // Sanitize the data to avoid nil map issues - if data == nil { - data = make(map[string]interface{}) - } - - return l.translator.Translate(messageID, data) -} - -// Info logs an info level message with translation -func (l *StandardLogger) Info(messageID string, data ...map[string]interface{}) { - var templateData map[string]interface{} - if len(data) > 0 { - templateData = data[0] - } - message := l.translate(messageID, templateData) - - // Enhance certain messages for better readability - if strings.Contains(messageID, "scheduler.started") { - message = "Scheduler started successfully!" - } else if strings.Contains(messageID, "scheduler.waiting") { - message = "Scheduler is running. Press Ctrl+C to stop..." 
- } - - l.Logger.Info(message) -} - -// Infof logs a formatted info level message with translation -func (l *StandardLogger) Infof(messageID string, data map[string]interface{}) { - l.Logger.Info(l.translate(messageID, data)) -} - -// Error logs an error level message with translation -func (l *StandardLogger) Error(messageID string, data ...map[string]interface{}) { - var templateData map[string]interface{} - if len(data) > 0 { - templateData = data[0] - } - l.Logger.Error(l.translate(messageID, templateData)) -} - -// Errorf logs a formatted error level message with translation -func (l *StandardLogger) Errorf(messageID string, data map[string]interface{}) { - l.Logger.Error(l.translate(messageID, data)) -} - -// Warn logs a warning level message with translation -func (l *StandardLogger) Warn(messageID string, data ...map[string]interface{}) { - var templateData map[string]interface{} - if len(data) > 0 { - templateData = data[0] - } - - // Skip translation warnings in quiet mode to reduce noise - if l.isQuietMode && strings.Contains(messageID, "translation_not_found") { - return - } - - l.Logger.Warn(l.translate(messageID, templateData)) -} - -// Warnf logs a formatted warning level message with translation -func (l *StandardLogger) Warnf(messageID string, data map[string]interface{}) { - // Skip translation warnings in quiet mode to reduce noise - if l.isQuietMode && strings.Contains(messageID, "translation_not_found") { - return - } - l.Logger.Warn(l.translate(messageID, data)) -} - -// Debug logs a debug level message with translation -func (l *StandardLogger) Debug(messageID string, data ...map[string]interface{}) { - var templateData map[string]interface{} - if len(data) > 0 { - templateData = data[0] - } - l.Logger.Debug(l.translate(messageID, templateData)) -} - -// Debugf logs a formatted debug level message with translation -func (l *StandardLogger) Debugf(messageID string, data map[string]interface{}) { - l.Logger.Debug(l.translate(messageID, data)) -} - -// SetLogLevel sets the logging level -func (l *StandardLogger) SetLogLevel(level string) { - switch level { - case "debug": - l.SetLevel(logrus.DebugLevel) - case "info": - l.SetLevel(logrus.InfoLevel) - case "warn": - l.SetLevel(logrus.WarnLevel) - case "error": - l.SetLevel(logrus.ErrorLevel) - default: - l.SetLevel(logrus.InfoLevel) - } -} - -// SetLogFile sets up dual output to both file and stdout -func (l *StandardLogger) SetLogFile(path string) error { - file, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666) - if err != nil { - return err - } - - l.fileWriter = file - - // Create dual writer - dualWriter := &DualWriter{ - fileWriter: file, - stdoutWriter: os.Stdout, - quietMode: l.isQuietMode, - } - - l.SetOutput(dualWriter) - return nil -} \ No newline at end of file diff --git a/pkg/logger/logger_test.go b/pkg/logger/logger_test.go deleted file mode 100644 index 209adaa..0000000 --- a/pkg/logger/logger_test.go +++ /dev/null @@ -1,369 +0,0 @@ -package logger - -import ( - "bytes" - "fmt" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/sirupsen/logrus" -) - -// MockTranslator implements the Translator interface for testing -type MockTranslator struct { - translations map[string]string -} - -func NewMockTranslator() *MockTranslator { - return &MockTranslator{ - translations: map[string]string{ - "test.info": "Test info message", - "test.error": "Test error message", - "test.warn": "Test warning message", - "test.debug": "Test debug message", - "test.with_data": "Message with data: 
{data}", - }, - } -} - -func (m *MockTranslator) Translate(messageID string, templateData map[string]interface{}) string { - msg, ok := m.translations[messageID] - if !ok { - return messageID - } - if templateData != nil { - for key, value := range templateData { - msg = strings.Replace(msg, "{"+key+"}", fmt.Sprintf("%v", value), -1) - } - } - return msg -} - -func TestNewLogger(t *testing.T) { - logger := NewLogger() - if logger == nil { - t.Error("Expected non-nil logger") - } - - // We can't access the internal logger directly in the interface-based implementation - // Instead, test that basic logging works - var buf bytes.Buffer - stdLogger, ok := logger.(*StandardLogger) - if !ok { - t.Fatal("Expected StandardLogger implementation") - } - stdLogger.SetOutput(&buf) - - logger.Info("test message") - if !strings.Contains(buf.String(), "test message") { - t.Error("Expected log to contain the test message") - } -} - -func TestSetLogLevel(t *testing.T) { - tests := []struct { - name string - level string - expectedLevel logrus.Level - }{ - { - name: "debug level", - level: "debug", - expectedLevel: logrus.DebugLevel, - }, - { - name: "info level", - level: "info", - expectedLevel: logrus.InfoLevel, - }, - { - name: "warn level", - level: "warn", - expectedLevel: logrus.WarnLevel, - }, - { - name: "error level", - level: "error", - expectedLevel: logrus.ErrorLevel, - }, - { - name: "invalid level defaults to info", - level: "invalid", - expectedLevel: logrus.InfoLevel, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - logger := NewLogger() - stdLogger, ok := logger.(*StandardLogger) - if !ok { - t.Fatal("Expected StandardLogger implementation") - } - - logger.SetLogLevel(tt.level) - if stdLogger.GetLevel() != tt.expectedLevel { - t.Errorf("Expected level %v, got %v", tt.expectedLevel, stdLogger.GetLevel()) - } - }) - } -} - -func TestSetLogFile(t *testing.T) { - // Create a temporary directory for test files - tmpDir, err := os.MkdirTemp("", "logger_test") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tmpDir) - - tests := []struct { - name string - filePath string - expectError bool - }{ - { - name: "valid file path", - filePath: filepath.Join(tmpDir, "test.log"), - expectError: false, - }, - { - name: "invalid directory", - filePath: filepath.Join(tmpDir, "nonexistent", "test.log"), - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - logger := NewLogger() - err := logger.SetLogFile(tt.filePath) - - if tt.expectError && err == nil { - t.Error("Expected error but got none") - } else if !tt.expectError && err != nil { - t.Errorf("Unexpected error: %v", err) - } - - if !tt.expectError { - // Check if file was created - if _, err := os.Stat(tt.filePath); os.IsNotExist(err) { - t.Error("Expected log file to be created") - } - } - }) - } -} - -func TestLoggingWithTranslation(t *testing.T) { - var buf bytes.Buffer - logger := NewLogger() - stdLogger, ok := logger.(*StandardLogger) - if !ok { - t.Fatal("Expected StandardLogger implementation") - } - stdLogger.SetOutput(&buf) - logger.SetLogLevel("debug") - - mockTranslator := NewMockTranslator() - logger.SetTranslator(mockTranslator) - - tests := []struct { - name string - logFunc func(messageID string, data map[string]interface{}) - messageID string - data map[string]interface{} - expectInLog string - }{ - { - name: "info message", - logFunc: func(m string, d map[string]interface{}) { - logger.Info(m, d) - }, - 
messageID: "test.info", - data: nil, - expectInLog: "Test info message", - }, - { - name: "error message", - logFunc: func(m string, d map[string]interface{}) { - logger.Error(m, d) - }, - messageID: "test.error", - data: nil, - expectInLog: "Test error message", - }, - { - name: "warning message", - logFunc: func(m string, d map[string]interface{}) { - logger.Warn(m, d) - }, - messageID: "test.warn", - data: nil, - expectInLog: "Test warning message", - }, - { - name: "debug message", - logFunc: func(m string, d map[string]interface{}) { - logger.Debug(m, d) - }, - messageID: "test.debug", - data: nil, - expectInLog: "Test debug message", - }, - { - name: "message with template data", - logFunc: func(m string, d map[string]interface{}) { - logger.Info(m, d) - }, - messageID: "test.with_data", - data: map[string]interface{}{ - "data": "test value", - }, - expectInLog: "Message with data: test value", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - buf.Reset() - tt.logFunc(tt.messageID, tt.data) - logOutput := buf.String() - - if !strings.Contains(logOutput, tt.expectInLog) { - t.Errorf("Expected log to contain '%s', got '%s'", tt.expectInLog, logOutput) - } - }) - } -} - -func TestLoggingLevelFiltering(t *testing.T) { - var buf bytes.Buffer - logger := NewLogger() - stdLogger, ok := logger.(*StandardLogger) - if !ok { - t.Fatal("Expected StandardLogger implementation") - } - stdLogger.SetOutput(&buf) - logger.SetLogLevel("info") - - mockTranslator := NewMockTranslator() - logger.SetTranslator(mockTranslator) - - // Debug message should not appear - logger.Debug("test.debug", nil) - if strings.Contains(buf.String(), "Test debug message") { - t.Error("Debug message should not appear when log level is info") - } - - // Info message should appear - buf.Reset() - logger.Info("test.info", nil) - if !strings.Contains(buf.String(), "Test info message") { - t.Error("Info message should appear when log level is info") - } -} - -func TestLoggingWithoutTranslator(t *testing.T) { - var buf bytes.Buffer - logger := NewLogger() - stdLogger, ok := logger.(*StandardLogger) - if !ok { - t.Fatal("Expected StandardLogger implementation") - } - stdLogger.SetOutput(&buf) - logger.SetLogLevel("info") - - // Log without translator should use message ID directly - logger.Info("direct.message", nil) - if !strings.Contains(buf.String(), "direct.message") { - t.Error("Message ID should be used directly when no translator is set") - } -} - -// TestFormattingMethods tests all the formatting methods that use the same interface -func TestFormattingMethods(t *testing.T) { - // Create a new logger and capture its output - logger := NewLogger() - stdLogger, ok := logger.(*StandardLogger) - if !ok { - t.Fatal("Expected StandardLogger implementation") - } - - var buf bytes.Buffer - stdLogger.SetOutput(&buf) - - // Set the log level to "debug" to ensure all messages are logged - logger.SetLogLevel("debug") - - // Set a mock translator - mockTranslator := NewMockTranslator() - logger.SetTranslator(mockTranslator) - - tests := []struct { - name string - method func(string, map[string]interface{}) - messageID string - data map[string]interface{} - expectInLog string - }{ - { - name: "Infof method", - method: func(m string, d map[string]interface{}) { - logger.Infof(m, d) - }, - messageID: "test.info", - data: map[string]interface{}{"data": "formatted info"}, - expectInLog: "Test info message", - }, - { - name: "Errorf method", - method: func(m string, d map[string]interface{}) { - logger.Errorf(m, 
d) - }, - messageID: "test.error", - data: map[string]interface{}{"data": "formatted error"}, - expectInLog: "Test error message", - }, - { - name: "Warnf method", - method: func(m string, d map[string]interface{}) { - logger.Warnf(m, d) - }, - messageID: "test.warn", - data: map[string]interface{}{"data": "formatted warning"}, - expectInLog: "Test warning message", - }, - { - name: "Debugf method", - method: func(m string, d map[string]interface{}) { - logger.Debugf(m, d) - }, - messageID: "test.debug", - data: map[string]interface{}{"data": "formatted debug"}, - expectInLog: "Test debug message", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - buf.Reset() - tt.method(tt.messageID, tt.data) - logOutput := buf.String() - - if !strings.Contains(logOutput, tt.expectInLog) { - t.Errorf("Expected log to contain '%s', got '%s'", tt.expectInLog, logOutput) - } - }) - } - - // Test with nil translator - logger.SetTranslator(nil) - buf.Reset() - logger.Infof("direct.message", nil) - if !strings.Contains(buf.String(), "direct.message") { - t.Error("Expected log to contain the direct message when no translator is set") - } -} \ No newline at end of file diff --git a/pkg/scheduler/scheduler.go b/pkg/scheduler/scheduler.go deleted file mode 100644 index 313aef0..0000000 --- a/pkg/scheduler/scheduler.go +++ /dev/null @@ -1,132 +0,0 @@ -package scheduler - -import ( - "fmt" - "os" - "os/exec" - "os/signal" - "strings" - "syscall" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" - "github.com/robfig/cron/v3" -) - -// Scheduler manages the scheduling of export jobs -type Scheduler struct { - config *config.Config - log logger.Logger - cron *cron.Cron -} - -// NewScheduler creates a new scheduler -func NewScheduler(cfg *config.Config, log logger.Logger) *Scheduler { - return &Scheduler{ - config: cfg, - log: log, - cron: cron.New(), - } -} - -// Start initializes the scheduler from environment variables -func (s *Scheduler) Start() error { - // Get schedule from environment variable - schedule := os.Getenv("EXPORT_SCHEDULE") - if schedule == "" { - s.log.Info("scheduler.no_schedule_defined", map[string]interface{}{ - "message": "No EXPORT_SCHEDULE environment variable defined. 
Scheduler will not run.", - }) - return nil - } - - // Get export mode and type from environment variables or use defaults - exportMode := os.Getenv("EXPORT_MODE") - if exportMode == "" { - exportMode = "complete" // Default to complete mode - } - - exportType := os.Getenv("EXPORT_TYPE") - if exportType == "" { - exportType = "all" // Default to export all - } - - s.log.Info("scheduler.starting", map[string]interface{}{ - "schedule":    schedule, - "export_mode": exportMode, - "export_type": exportType, - }) - - // Add the job to the cron scheduler - _, err := s.cron.AddFunc(schedule, func() { - s.runExport(exportMode, exportType) - }) - if err != nil { - s.log.Error("scheduler.invalid_schedule", map[string]interface{}{ - "schedule": schedule, - "error":    err.Error(), - "details":  "Format should be standard cron format: minute hour day-of-month month day-of-week", - }) - return fmt.Errorf("invalid schedule format: %w", err) - } - - // Start the cron scheduler - s.cron.Start() - - entries := s.cron.Entries() - if len(entries) > 0 { - s.log.Info("scheduler.started", map[string]interface{}{ - "next_run": entries[0].Next.Format(time.RFC3339), - }) - } else { - s.log.Warn("scheduler.no_entries", map[string]interface{}{ - "message": "Scheduler started but no entries were added", - }) - } - - // Set up a signal handler to gracefully shut down the scheduler - signalChan := make(chan os.Signal, 1) - signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM) - go func() { - <-signalChan - s.Stop() - }() - - return nil -} - -// Stop gracefully stops the scheduler -func (s *Scheduler) Stop() { - if s.cron != nil { - s.log.Info("scheduler.stopping", nil) - ctx := s.cron.Stop() - <-ctx.Done() - s.log.Info("scheduler.stopped", nil) - } -} - -// runExport executes the export command with the specified mode and type -func (s *Scheduler) runExport(mode, exportType string) { - s.log.Info("scheduler.running_export", map[string]interface{}{ - "mode": mode, - "type": exportType, - }) - - // Create command to run export - cmd := exec.Command(os.Args[0], "export", "--mode", mode, "--export", exportType) - - // Get output - output, err := cmd.CombinedOutput() - if err != nil { - s.log.Error("scheduler.export_failed", map[string]interface{}{ - "error":  err.Error(), - "output": strings.TrimSpace(string(output)), - }) - return - } - - s.log.Info("scheduler.export_completed", map[string]interface{}{ - "output": strings.TrimSpace(string(output)), - }) -} \ No newline at end of file diff --git a/pkg/scheduler/scheduler_test.go b/pkg/scheduler/scheduler_test.go deleted file mode 100644 index 47f6f66..0000000 --- a/pkg/scheduler/scheduler_test.go +++ /dev/null @@ -1,119 +0,0 @@ -package scheduler - -import ( - "os" - "testing" - "time" - - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/config" - "github.com/JohanDevl/Export_Trakt_4_Letterboxd/pkg/logger" -) - -// MockLogger for tests -type MockLogger struct{} - -func (m *MockLogger) Debug(messageID string, data ...map[string]interface{}) {} -func (m *MockLogger) Debugf(messageID string, data map[string]interface{}) {} -func (m *MockLogger) Info(messageID string, data ...map[string]interface{}) {} -func (m *MockLogger) Infof(messageID string, data map[string]interface{}) {} -func (m *MockLogger) Warn(messageID string, data ...map[string]interface{}) {} -func (m *MockLogger) Warnf(messageID string, data map[string]interface{}) {} -func (m *MockLogger) Error(messageID string, data ...map[string]interface{}) {} -func (m *MockLogger) Errorf(messageID string, data 
map[string]interface{}) {} -func (m *MockLogger) SetLogLevel(level string) {} -func (m *MockLogger) SetLogFile(filepath string) error { return nil } -func (m *MockLogger) SetTranslator(translator logger.Translator) {} - -func TestNewScheduler(t *testing.T) { - // Create a mock configuration and logger - cfg := &config.Config{} - log := &MockLogger{} - - // Create a new scheduler - sched := NewScheduler(cfg, log) - - // Verify that the scheduler was created correctly - if sched == nil { - t.Error("NewScheduler() returned nil") - } - if sched.config != cfg { - t.Error("NewScheduler() did not assign the configuration correctly") - } -} - -func TestScheduler_Start_NoSchedule(t *testing.T) { - // Create a mock configuration and logger - cfg := &config.Config{} - log := &MockLogger{} - - // Make sure the EXPORT_SCHEDULE environment variable is not set - os.Unsetenv("EXPORT_SCHEDULE") - - // Create a new scheduler - sched := NewScheduler(cfg, log) - - // Start the scheduler - err := sched.Start() - - // Verify that there is no error - if err != nil { - t.Errorf("Start() returned an error: %v", err) - } -} - -func TestScheduler_Start_InvalidSchedule(t *testing.T) { - // Create a mock configuration and logger - cfg := &config.Config{} - log := &MockLogger{} - - // Set an invalid schedule - os.Setenv("EXPORT_SCHEDULE", "invalid-schedule") - defer os.Unsetenv("EXPORT_SCHEDULE") - - // Create a new scheduler - sched := NewScheduler(cfg, log) - - // Start the scheduler - err := sched.Start() - - // Verify that an error is returned - if err == nil { - t.Error("Start() did not return an error for an invalid schedule") - } -} - -func TestScheduler_Start_ValidSchedule(t *testing.T) { - // Create a mock configuration and logger - cfg := &config.Config{} - log := &MockLogger{} - - // Set a valid schedule - os.Setenv("EXPORT_SCHEDULE", "* * * * *") // Every minute - defer os.Unsetenv("EXPORT_SCHEDULE") - - // Create a new scheduler - sched := NewScheduler(cfg, log) - - // Start the scheduler in a goroutine - errChan := make(chan error, 1) - go func() { - errChan <- sched.Start() - }() - - // Wait briefly to make sure the scheduler starts - time.Sleep(100 * time.Millisecond) - - // Stop the scheduler - sched.Stop() - - // Verify that there is no error - select { - case err := <-errChan: - if err != nil { - t.Errorf("Start() returned an error: %v", err) - } - case <-time.After(1 * time.Second): - // Start() has not returned after 1 second - // This is expected if it blocks indefinitely - } -} \ No newline at end of file diff --git a/scripts/coverage.sh b/scripts/coverage.sh deleted file mode 100755 index 2e047c8..0000000 --- a/scripts/coverage.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -# Run tests and create coverage profile -go test -coverprofile=coverage.txt -covermode=atomic ./pkg/... 
- -# For debug purposes, print the coverage -go tool cover -func=coverage.txt - -# Check if we meet the 70% coverage threshold -COVERAGE=$(go tool cover -func=coverage.txt | grep total | awk '{print $3}' | sed 's/%//') -THRESHOLD=70.0 - -echo "Current test coverage: $COVERAGE%" -echo "Required threshold: $THRESHOLD%" - -if (( $(echo "$COVERAGE < $THRESHOLD" | bc -l) )); then - echo "Test coverage is below threshold" - exit 1 -else - echo "Test coverage meets or exceeds threshold" - exit 0 -fi \ No newline at end of file diff --git a/scripts/entrypoint.sh b/scripts/entrypoint.sh deleted file mode 100644 index f307a2b..0000000 --- a/scripts/entrypoint.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh -set -e - -# Display the version -echo "Export Trakt 4 Letterboxd - Scheduler" -echo "======================================" - -# If the EXPORT_SCHEDULE variable is set, start the scheduler -if [ -n "$EXPORT_SCHEDULE" ]; then - echo "Schedule configured: $EXPORT_SCHEDULE" - echo "Export mode: ${EXPORT_MODE:-complete}" - echo "Export type: ${EXPORT_TYPE:-all}" - - # Run the program with the schedule command - exec /app/export-trakt schedule -else - echo "No EXPORT_SCHEDULE defined. Exiting." - exit 1 -fi \ No newline at end of file diff --git a/setup_trakt.sh b/setup_trakt.sh new file mode 100755 index 0000000..e052480 --- /dev/null +++ b/setup_trakt.sh @@ -0,0 +1,98 @@ +#!/bin/bash +SCRIPT_DIR=$(dirname "$(realpath "$0")") + +# Detect OS for sed compatibility +if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS uses BSD sed + SED_INPLACE="sed -i ''" +else + # Linux and others use GNU sed + SED_INPLACE="sed -i" +fi + +# Always use the config file from the config directory +CONFIG_DIR="${SCRIPT_DIR}/config" +if [ -f "/app/config/.config.cfg" ]; then + # If running in Docker, use the absolute path + CONFIG_FILE="/app/config/.config.cfg" +else + # If running locally, use the relative path + CONFIG_FILE="${CONFIG_DIR}/.config.cfg" +fi + +echo "=== Trakt Authentication Configuration ===" +echo "" +echo "This script will help you configure authentication with the Trakt API." +echo "" + +# Check if the configuration file exists +if [ ! -f "$CONFIG_FILE" ]; then + echo "Error: Configuration file does not exist at $CONFIG_FILE." + echo "Creating config directory if it doesn't exist..." + mkdir -p "$(dirname "$CONFIG_FILE")" + + # Check if example config exists and copy it + if [ -f "${SCRIPT_DIR}/.config.cfg.example" ]; then + echo "Copying example config to $CONFIG_FILE..." + cp "${SCRIPT_DIR}/.config.cfg.example" "$CONFIG_FILE" + elif [ -f "${CONFIG_DIR}/.config.cfg.example" ]; then + echo "Copying example config to $CONFIG_FILE..." + cp "${CONFIG_DIR}/.config.cfg.example" "$CONFIG_FILE" + else + echo "Error: No example configuration file found." 
+ exit 1 + fi +fi + +# Request API information +echo "Step 1: Create an application at https://trakt.tv/oauth/applications" +echo " - Name: Export Trakt 4 Letterboxd" +echo " - Redirect URL: urn:ietf:wg:oauth:2.0:oob" +echo "" +read -p "Enter your Client ID (API Key): " API_KEY +read -p "Enter your Client Secret: " API_SECRET +read -p "Enter your Trakt Username: " USERNAME +echo "" + +# Update the configuration file +$SED_INPLACE "s|API_KEY=.*|API_KEY=\"$API_KEY\"|" "$CONFIG_FILE" +$SED_INPLACE "s|API_SECRET=.*|API_SECRET=\"$API_SECRET\"|" "$CONFIG_FILE" +$SED_INPLACE "s|REDIRECT_URI=.*|REDIRECT_URI=\"urn:ietf:wg:oauth:2.0:oob\"|" "$CONFIG_FILE" +$SED_INPLACE "s|USERNAME=.*|USERNAME=\"$USERNAME\"|" "$CONFIG_FILE" + +echo "Step 2: Get an authorization code" +echo "" +echo "Open the following link in your browser:" +echo "https://trakt.tv/oauth/authorize?response_type=code&client_id=${API_KEY}&redirect_uri=urn:ietf:wg:oauth:2.0:oob" +echo "" +read -p "Enter the displayed authorization code: " AUTH_CODE +echo "" + +# Get tokens +echo "Step 3: Getting access tokens..." +RESPONSE=$(curl -s -X POST "https://api.trakt.tv/oauth/token" \ + -H "Content-Type: application/json" \ + -d "{ + \"code\": \"${AUTH_CODE}\", + \"client_id\": \"${API_KEY}\", + \"client_secret\": \"${API_SECRET}\", + \"redirect_uri\": \"urn:ietf:wg:oauth:2.0:oob\", + \"grant_type\": \"authorization_code\" + }") + +ACCESS_TOKEN=$(echo "$RESPONSE" | jq -r '.access_token') +REFRESH_TOKEN=$(echo "$RESPONSE" | jq -r '.refresh_token') + +if [[ "$ACCESS_TOKEN" != "null" && "$REFRESH_TOKEN" != "null" && -n "$ACCESS_TOKEN" && -n "$REFRESH_TOKEN" ]]; then + # Update the configuration file + $SED_INPLACE "s|ACCESS_TOKEN=.*|ACCESS_TOKEN=\"$ACCESS_TOKEN\"|" "$CONFIG_FILE" + $SED_INPLACE "s|REFRESH_TOKEN=.*|REFRESH_TOKEN=\"$REFRESH_TOKEN\"|" "$CONFIG_FILE" + + echo "✅ Configuration completed successfully!" + echo "" + echo "You can now run the Export_Trakt_4_Letterboxd.sh script" +else + echo "❌ Error obtaining tokens." 
+ echo "API response: $RESPONSE" + exit 1 +fi \ No newline at end of file diff --git a/tests/bats b/tests/bats new file mode 160000 index 0000000..3172a45 --- /dev/null +++ b/tests/bats @@ -0,0 +1 @@ +Subproject commit 3172a45e55a58bbaf952e971a4b7347483842ba7 diff --git a/tests/helpers/bats-assert b/tests/helpers/bats-assert new file mode 160000 index 0000000..b93143a --- /dev/null +++ b/tests/helpers/bats-assert @@ -0,0 +1 @@ +Subproject commit b93143a1bfbde41d9b7343aab0d36f3ef6549e6b diff --git a/tests/helpers/bats-file b/tests/helpers/bats-file new file mode 160000 index 0000000..c0e3a26 --- /dev/null +++ b/tests/helpers/bats-file @@ -0,0 +1 @@ +Subproject commit c0e3a260d2901d5045e164e7737238a19db2d526 diff --git a/tests/helpers/bats-support b/tests/helpers/bats-support new file mode 160000 index 0000000..d007fc1 --- /dev/null +++ b/tests/helpers/bats-support @@ -0,0 +1 @@ +Subproject commit d007fc1f451abbad55204fa9c9eb3e6ed5dc5f61 diff --git a/tests/integration/export_process_test.bats b/tests/integration/export_process_test.bats new file mode 100644 index 0000000..f99c397 --- /dev/null +++ b/tests/integration/export_process_test.bats @@ -0,0 +1,112 @@ +#!/usr/bin/env bats + +# Load the testing helper with a simple relative path +load "../test_helper" + +# Load the mock API functions +source "${TESTS_DIR}/mocks/trakt_api_mock.sh" + +# Set up the test environment for integration tests +setup_integration_test() { + # Create mock config file + create_mock_config + + # Copy the mock data to the test backup directory + mkdir -p "${TEST_TEMP_DIR}/backup" + cp "${MOCKS_DIR}/ratings.json" "${TEST_TEMP_DIR}/backup/ratings_movies.json" + cp "${MOCKS_DIR}/history.json" "${TEST_TEMP_DIR}/backup/history_movies.json" + cp "${MOCKS_DIR}/watchlist.json" "${TEST_TEMP_DIR}/backup/watchlist_movies.json" + + # Create an expected output directory + mkdir -p "${TEST_TEMP_DIR}/copy" +} + +# Test simple export process +@test "Integration: Basic export process should create CSV files" { + # Set up the integration test environment + setup_integration_test + + # Create temporary copy of the main script for testing + cat > "${TEST_TEMP_DIR}/export_test.sh" << EOF +#!/bin/bash + +# Override directories for testing +export SCRIPT_DIR="${TEST_TEMP_DIR}" +export CONFIG_DIR="${TEST_TEMP_DIR}/config" +export LOG_DIR="${TEST_TEMP_DIR}/logs" +export COPY_DIR="${TEST_TEMP_DIR}/copy" +export TEMP_DIR="${TEST_TEMP_DIR}/TEMP" +export BACKUP_DIR="${TEST_TEMP_DIR}/backup" + +# Enable test mode +export TEST_MODE="true" + +# Source the library files +source "${LIB_DIR}/config.sh" +source "${LIB_DIR}/utils.sh" +source "${TESTS_DIR}/mocks/trakt_api_mock.sh" +source "${LIB_DIR}/data_processing.sh" + +# Create log file +LOG_FILE="${LOG_DIR}/export_test.log" +mkdir -p "${LOG_DIR}" +touch "\${LOG_FILE}" + +# Initialize directories +ensure_directories "\${LOG_DIR}" "\${COPY_DIR}" "\${LOG_FILE}" +init_temp_dir "\${TEMP_DIR}" "\${LOG_FILE}" +init_backup_dir "\${BACKUP_DIR}" "\${LOG_FILE}" + +# Process ratings (simplified for test) +echo "Processing ratings..." | tee -a "\${LOG_FILE}" +RATINGS_FILE="\${BACKUP_DIR}/ratings_movies.json" +RATINGS_LOOKUP="\${TEMP_DIR}/ratings_lookup.json" +create_ratings_lookup "\${RATINGS_FILE}" "\${RATINGS_LOOKUP}" "\${LOG_FILE}" + +# Process history (simplified for test) +echo "Processing history..." 
| tee -a "\${LOG_FILE}" +HISTORY_FILE="\${BACKUP_DIR}/history_movies.json" + +# Create CSV header for ratings +echo "Title,Year,Directors,Rating,WatchedDate" > "\${COPY_DIR}/ratings.csv" + +# Extract movie information from ratings +jq -r '.[] | "\(.movie.title),\(.movie.year),,\(.rating),\(.rated_at)"' "\${RATINGS_FILE}" >> "\${COPY_DIR}/ratings.csv" + +# Create CSV header for watchlist +echo "Title,Year,Directors" > "\${COPY_DIR}/watchlist.csv" + +# Extract movie information from watchlist +jq -r '.[] | "\(.movie.title),\(.movie.year),"' "\${BACKUP_DIR}/watchlist_movies.json" >> "\${COPY_DIR}/watchlist.csv" + +echo "Export completed successfully" | tee -a "\${LOG_FILE}" +exit 0 +EOF + + # Make the test script executable + chmod +x "${TEST_TEMP_DIR}/export_test.sh" + + # Run the test script + run "${TEST_TEMP_DIR}/export_test.sh" + + # Check it was successful + assert_success + + # Check if the output files were created + assert_file_exists "${TEST_TEMP_DIR}/copy/ratings.csv" + assert_file_exists "${TEST_TEMP_DIR}/copy/watchlist.csv" + + # Check content of ratings.csv + run grep "Inception,2010,,8," "${TEST_TEMP_DIR}/copy/ratings.csv" + assert_success + + run grep "The Shawshank Redemption,1994,,9," "${TEST_TEMP_DIR}/copy/ratings.csv" + assert_success + + # Check content of watchlist.csv + run grep "Dune,2021," "${TEST_TEMP_DIR}/copy/watchlist.csv" + assert_success + + run grep "Oppenheimer,2023," "${TEST_TEMP_DIR}/copy/watchlist.csv" + assert_success +} \ No newline at end of file diff --git a/tests/mocks/history.json b/tests/mocks/history.json new file mode 100644 index 0000000..c7c4619 --- /dev/null +++ b/tests/mocks/history.json @@ -0,0 +1,50 @@ +[ + { + "id": 123456789, + "watched_at": "2023-06-20T19:30:15.000Z", + "action": "watch", + "type": "movie", + "movie": { + "title": "Inception", + "year": 2010, + "ids": { + "trakt": 16662, + "slug": "inception-2010", + "imdb": "tt1375666", + "tmdb": 27205 + } + } + }, + { + "id": 123456790, + "watched_at": "2023-06-19T20:15:30.000Z", + "action": "watch", + "type": "movie", + "movie": { + "title": "The Matrix", + "year": 1999, + "ids": { + "trakt": 481, + "slug": "the-matrix-1999", + "imdb": "tt0133093", + "tmdb": 603 + } + } + }, + { + "id": 123456791, + "watched_at": "2023-06-18T21:45:10.000Z", + "action": "watch", + "type": "movie", + "movie": { + "title": "Pulp Fiction", + "year": 1994, + "ids": { + "trakt": 545, + "slug": "pulp-fiction-1994", + "imdb": "tt0110912", + "tmdb": 680 + } + } + } +] diff --git a/tests/mocks/ratings.json b/tests/mocks/ratings.json new file mode 100644 index 0000000..b6ddd1f --- /dev/null +++ b/tests/mocks/ratings.json @@ -0,0 +1,47 @@ +[ + { + "rated_at": "2023-06-15T21:36:49.000Z", + "rating": 8, + "type": "movie", + "movie": { + "title": "Inception", + "year": 2010, + "ids": { + "trakt": 16662, + "slug": "inception-2010", + "imdb": "tt1375666", + "tmdb": 27205 + } + } + }, + { + "rated_at": "2023-06-16T18:22:13.000Z", + "rating": 9, + "type": "movie", + "movie": { + "title": "The Shawshank Redemption", + "year": 1994, + "ids": { + "trakt": 231, + "slug": "the-shawshank-redemption-1994", + "imdb": "tt0111161", + "tmdb": 278 + } + } + }, + { + "rated_at": "2023-06-17T20:45:30.000Z", + "rating": 7, + "type": "movie", + "movie": { + "title": "The Dark Knight", + "year": 2008, + "ids": { + "trakt": 120, + "slug": "the-dark-knight-2008", + "imdb": "tt0468569", + "tmdb": 155 + } + } + } +] diff --git a/tests/mocks/trakt_api_mock.sh b/tests/mocks/trakt_api_mock.sh new file mode 100644 index 0000000..dc23322 --- 
/dev/null +++ b/tests/mocks/trakt_api_mock.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# Mock functions for trakt_api.sh + +# Mock for get_trakt_ratings +get_trakt_ratings() { + local type="$1" + local output_file="$2" + + if [[ "$TEST_MODE" == "true" ]]; then + # In test mode, use the mock data + cp "${MOCKS_DIR}/ratings.json" "$output_file" + return 0 + fi + + # Original function would be called here if not in test mode + return 1 +} + +# Mock for get_trakt_history +get_trakt_history() { + local start_date="$1" + local output_file="$2" + + if [[ "$TEST_MODE" == "true" ]]; then + # In test mode, use the mock data + cp "${MOCKS_DIR}/history.json" "$output_file" + return 0 + fi + + # Original function would be called here if not in test mode + return 1 +} + +# Mock for get_trakt_watchlist +get_trakt_watchlist() { + local type="$1" + local output_file="$2" + + if [[ "$TEST_MODE" == "true" ]]; then + # In test mode, use the mock data + cp "${MOCKS_DIR}/watchlist.json" "$output_file" + return 0 + fi + + # Original function would be called here if not in test mode + return 1 +} + +# Mock for refresh_token +refresh_token() { + if [[ "$TEST_MODE" == "true" ]]; then + # In test mode, just pretend we refreshed the token + echo "Token refreshed (mock)" + return 0 + fi + + # Original function would be called here if not in test mode + return 1 +} + +# Mock for check_token_validity +check_token_validity() { + if [[ "$TEST_MODE" == "true" ]]; then + # In test mode, pretend the token is valid + return 0 + fi + + # Original function would be called here if not in test mode + return 1 +} + +# Export the mock functions +export -f get_trakt_ratings +export -f get_trakt_history +export -f get_trakt_watchlist +export -f refresh_token +export -f check_token_validity \ No newline at end of file diff --git a/tests/mocks/watchlist.json b/tests/mocks/watchlist.json new file mode 100644 index 0000000..2fbbd33 --- /dev/null +++ b/tests/mocks/watchlist.json @@ -0,0 +1,44 @@ +[ + { + "listed_at": "2023-06-22T14:25:45.000Z", + "type": "movie", + "movie": { + "title": "Dune", + "year": 2021, + "ids": { + "trakt": 506740, + "slug": "dune-2021", + "imdb": "tt1160419", + "tmdb": 438631 + } + } + }, + { + "listed_at": "2023-06-21T10:15:30.000Z", + "type": "movie", + "movie": { + "title": "Oppenheimer", + "year": 2023, + "ids": { + "trakt": 582730, + "slug": "oppenheimer-2023", + "imdb": "tt15398776", + "tmdb": 872585 + } + } + }, + { + "listed_at": "2023-06-20T08:42:15.000Z", + "type": "movie", + "movie": { + "title": "The Batman", + "year": 2022, + "ids": { + "trakt": 567189, + "slug": "the-batman-2022", + "imdb": "tt1877830", + "tmdb": 414906 + } + } + } +] diff --git a/tests/run_tests.sh b/tests/run_tests.sh new file mode 100755 index 0000000..286d275 --- /dev/null +++ b/tests/run_tests.sh @@ -0,0 +1,103 @@ +#!/usr/bin/env bash +# +# Run all tests and generate coverage report +# + +# Get the script directory +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$(dirname "${SCRIPT_DIR}")" && pwd)" +BATS_DIR="${SCRIPT_DIR}/bats" +TEST_RESULTS_DIR="${REPO_ROOT}/test-results" + +# Create test results directory +mkdir -p "${TEST_RESULTS_DIR}" + +# Output title +echo "🧪 Running tests for Export_Trakt_4_Letterboxd..." +echo "==============================================" + +# Check if we have the required tools +check_dependencies() { + local missing=0 + + # Check for bats + if [ ! 
-d "${BATS_DIR}" ]; then + echo "❌ bats-core not found (run: git submodule update --init --recursive)" + missing=1 + fi + + # Check for jq + if ! command -v jq &> /dev/null; then + echo "❌ jq not found (install with your package manager)" + missing=1 + fi + + # Check for kcov if coverage is requested but don't fail + if [ "$1" = "coverage" ] && ! command -v kcov &> /dev/null; then + echo "⚠️ kcov not found - coverage report will be skipped" + export SKIP_COVERAGE=1 + fi + + if [ $missing -eq 1 ]; then + echo "Please install the missing dependencies and try again." + exit 1 + fi +} + +# Run the tests +run_tests() { + echo "🔍 Running unit tests..." + "${BATS_DIR}/bin/bats" "${SCRIPT_DIR}/unit" | tee "${TEST_RESULTS_DIR}/unit_tests.log" + local unit_status=${PIPESTATUS[0]} + + echo -e "\n🔍 Running integration tests..." + "${BATS_DIR}/bin/bats" "${SCRIPT_DIR}/integration" | tee "${TEST_RESULTS_DIR}/integration_tests.log" + local integration_status=${PIPESTATUS[0]} + + # Return success only if all tests passed + if [ $unit_status -eq 0 ] && [ $integration_status -eq 0 ]; then + echo -e "\n✅ All tests passed!" + return 0 + else + echo -e "\n❌ Some tests failed!" + return 1 + fi +} + +# Generate coverage report +generate_coverage() { + if [ "${SKIP_COVERAGE}" = "1" ]; then + echo -e "\n⏩ Skipping coverage report generation (kcov not installed)" + return 0 + fi + + echo -e "\n📊 Generating test coverage report..." + + # Create coverage directory + mkdir -p "${TEST_RESULTS_DIR}/coverage" + + # Run the tests with kcov for coverage reporting + kcov --include-path="${REPO_ROOT}/lib" \ + "${TEST_RESULTS_DIR}/coverage" \ + "${BATS_DIR}/bin/bats" "${SCRIPT_DIR}/unit" "${SCRIPT_DIR}/integration" + + echo "Coverage report generated at: ${TEST_RESULTS_DIR}/coverage/index.html" +} + +# Main execution +check_dependencies "$1" + +# Run the tests +run_tests +TEST_STATUS=$? + +# Generate coverage if requested +if [ "$1" = "coverage" ]; then + generate_coverage +fi + +echo -e "\n📝 Test summary:" +echo "Unit tests: $(grep "tests," "${TEST_RESULTS_DIR}/unit_tests.log" | tail -n 1)" +echo "Integration tests: $(grep "tests," "${TEST_RESULTS_DIR}/integration_tests.log" | tail -n 1)" + +exit $TEST_STATUS \ No newline at end of file diff --git a/tests/test_helper.bash b/tests/test_helper.bash new file mode 100644 index 0000000..7931576 --- /dev/null +++ b/tests/test_helper.bash @@ -0,0 +1,77 @@ +#!/usr/bin/env bash + +# Determine directory containing this script +TEST_HELPER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" + +# Set up the test environment +REPO_ROOT="$( cd "${TEST_HELPER_DIR}/.." 
>/dev/null 2>&1 && pwd )" +LIB_DIR="${REPO_ROOT}/lib" +TESTS_DIR="${REPO_ROOT}/tests" +MOCKS_DIR="${TESTS_DIR}/mocks" +TEST_DATA_DIR="${TESTS_DIR}/data" + +# Export directories for use in test files +export REPO_ROOT +export LIB_DIR +export TESTS_DIR +export MOCKS_DIR +export TEST_DATA_DIR + +# Load testing libraries - using relative paths from this file +load "${TEST_HELPER_DIR}/helpers/bats-support/load" +load "${TEST_HELPER_DIR}/helpers/bats-assert/load" +load "${TEST_HELPER_DIR}/helpers/bats-file/load" + +# Setup the test environment before each test +setup() { + # Create a temporary directory for test artifacts + TEST_TEMP_DIR="$(mktemp -d)" + export TEST_TEMP_DIR + + # Set up environment variables for testing + export TRAKT_CLIENT_ID="test_client_id" + export TRAKT_CLIENT_SECRET="test_client_secret" + export TRAKT_REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" + export TEST_MODE="true" + + # Create mock directories mirroring the main project structure + mkdir -p "${TEST_TEMP_DIR}/config" + mkdir -p "${TEST_TEMP_DIR}/logs" + mkdir -p "${TEST_TEMP_DIR}/backup" + mkdir -p "${TEST_TEMP_DIR}/TEMP" + mkdir -p "${TEST_TEMP_DIR}/copy" +} + +# Clean up after each test +teardown() { + # Remove the temporary directory contents first + if [ -d "${TEST_TEMP_DIR}" ]; then + # First try to remove all contents + find "${TEST_TEMP_DIR}" -mindepth 1 -delete 2>/dev/null + + # Then try to remove the directory itself + rmdir "${TEST_TEMP_DIR}" 2>/dev/null || true + fi +} + +# Helper function to create a mock config file +create_mock_config() { + cat > "${TEST_TEMP_DIR}/config/.config.cfg" << EOF +TRAKT_CLIENT_ID="test_client_id" +TRAKT_CLIENT_SECRET="test_client_secret" +TRAKT_ACCESS_TOKEN="test_access_token" +TRAKT_REFRESH_TOKEN="test_refresh_token" +TRAKT_EXPIRES_IN="7889238" +TRAKT_CREATED_AT="1600000000" +DEBUG_MODE="true" +LOG_LEVEL="DEBUG" +EOF +} + +# Helper function to load a mock JSON response +load_mock_response() { + local mock_file="$1" + local target_file="$2" + + cp "${MOCKS_DIR}/${mock_file}" "${target_file}" +} \ No newline at end of file diff --git a/tests/unit/config_test.bats b/tests/unit/config_test.bats new file mode 100644 index 0000000..10fd7cc --- /dev/null +++ b/tests/unit/config_test.bats @@ -0,0 +1,153 @@ +#!/usr/bin/env bats + +# Load the testing helper with a simple relative path +load "../test_helper" + +# Test initialization of temporary directory +@test "init_temp_dir creates and cleans directory" { + # Source the function under test + source "${LIB_DIR}/config.sh" + + # Create a test directory + local test_dir="${TEST_TEMP_DIR}/init_test" + mkdir -p "${test_dir}" + + # Add some files + touch "${test_dir}/file1.txt" + mkdir -p "${test_dir}/subdir" + touch "${test_dir}/subdir/file2.txt" + + # Create a log file + local log_file="${TEST_TEMP_DIR}/test.log" + touch "${log_file}" + + # Run the function under test + run init_temp_dir "${test_dir}" "${log_file}" + + # Check that it succeeded + assert_success + + # Check that directory exists but is empty + assert_dir_exists "${test_dir}" + run find "${test_dir}" -type f + assert_output "" +} + +@test "ensure_directories creates directories when they don't exist" { + # Source the function under test + source "${LIB_DIR}/config.sh" + + # Define test directories + local dir1="${TEST_TEMP_DIR}/dir1" + local dir2="${TEST_TEMP_DIR}/dir2" + + # Create a log file + local log_file="${TEST_TEMP_DIR}/test.log" + touch "${log_file}" + + # Ensure the directories don't exist initially + rm -rf "${dir1}" "${dir2}" + + # Run the function under test + 
run ensure_directories "${dir1}" "${dir2}" "${log_file}" + + # Check that it succeeded + assert_success + + # Check that directories were created + assert_dir_exists "${dir1}" + assert_dir_exists "${dir2}" +} + +@test "detect_os_sed returns correct sed command for OS" { + # Source the function under test + source "${LIB_DIR}/config.sh" + + # Create a log file + local log_file="${TEST_TEMP_DIR}/test.log" + touch "${log_file}" + + # Run the function under test + run detect_os_sed "${log_file}" + + # Check that it succeeded + assert_success + + # On macOS, it should return 'sed -i "" "s///"' + # On Linux, it should return 'sed -i "s///"' + if [[ "$OSTYPE" == "darwin"* ]]; then + assert_output --partial "sed -i ''" + else + assert_output --partial "sed -i" + fi +} + +@test "init_backup_dir creates and preserves directory" { + # Source the function under test + source "${LIB_DIR}/config.sh" + + # Create a test backup directory + local backup_dir="${TEST_TEMP_DIR}/backup_test" + mkdir -p "${backup_dir}" + + # Create a log file + local log_file="${TEST_TEMP_DIR}/test.log" + touch "${log_file}" + + # Add a file that should be preserved + touch "${backup_dir}/ratings_movies.json" + + # Run the function under test + run init_backup_dir "${backup_dir}" "${log_file}" + + # Check that it succeeded + assert_success + + # Check that directory exists + assert_dir_exists "${backup_dir}" + + # Check that the existing file was preserved + assert_file_exists "${backup_dir}/ratings_movies.json" +} + +@test "load_config loads configuration values" { + # Create a mock config file with environment variables + cat > "${TEST_TEMP_DIR}/config/.config.cfg" << EOF +TRAKT_CLIENT_ID="test_client_id" +TRAKT_CLIENT_SECRET="test_client_secret" +TRAKT_ACCESS_TOKEN="test_access_token" +TRAKT_REFRESH_TOKEN="test_refresh_token" +TRAKT_EXPIRES_IN="7889238" +TRAKT_CREATED_AT="1600000000" +DEBUG_MODE="true" +LOG_LEVEL="DEBUG" +EOF + + # Create a log file + local log_file="${TEST_TEMP_DIR}/test.log" + touch "${log_file}" + + # Define a wrapper function that sources config.sh and calls load_config + test_load_config() { + source "${LIB_DIR}/config.sh" + load_config "${TEST_TEMP_DIR}" "${log_file}" + echo "TRAKT_CLIENT_ID=${TRAKT_CLIENT_ID}" + echo "TRAKT_CLIENT_SECRET=${TRAKT_CLIENT_SECRET}" + echo "TRAKT_ACCESS_TOKEN=${TRAKT_ACCESS_TOKEN}" + echo "TRAKT_REFRESH_TOKEN=${TRAKT_REFRESH_TOKEN}" + echo "DEBUG_MODE=${DEBUG_MODE}" + } + + # Run the wrapper function + run test_load_config + + # Check that it succeeded + assert_success + + # Check that config values were loaded correctly + assert_output --partial "TRAKT_CLIENT_ID=test_client_id" + assert_output --partial "TRAKT_CLIENT_SECRET=test_client_secret" + assert_output --partial "TRAKT_ACCESS_TOKEN=test_access_token" + assert_output --partial "TRAKT_REFRESH_TOKEN=test_refresh_token" + assert_output --partial "DEBUG_MODE=true" +} \ No newline at end of file diff --git a/tests/unit/data_processing_test.bats b/tests/unit/data_processing_test.bats new file mode 100644 index 0000000..4f76ae9 --- /dev/null +++ b/tests/unit/data_processing_test.bats @@ -0,0 +1,218 @@ +#!/usr/bin/env bats + +# Load the testing helper +load '../test_helper' + +# Load the actual code under test +load "${LIB_DIR}/data_processing.sh" + +# Setup the test data +setup_test_data() { + # Create a test ratings file + cat > "${TEST_TEMP_DIR}/test_ratings.json" << EOF +[ + { + "rated_at": "2023-06-15T21:36:49.000Z", + "rating": 8, + "type": "movie", + "movie": { + "title": "Inception", + "year": 2010, + "ids": { + 
"trakt": 16662, + "slug": "inception-2010", + "imdb": "tt1375666", + "tmdb": 27205 + } + } + }, + { + "rated_at": "2023-06-16T18:22:13.000Z", + "rating": 9, + "type": "movie", + "movie": { + "title": "The Shawshank Redemption", + "year": 1994, + "ids": { + "trakt": 231, + "slug": "the-shawshank-redemption-1994", + "imdb": "tt0111161", + "tmdb": 278 + } + } + } +] +EOF + + # Create a test watched movies file + cat > "${TEST_TEMP_DIR}/test_watched.json" << EOF +[ + { + "plays": 2, + "last_watched_at": "2023-06-15T21:36:49.000Z", + "movie": { + "title": "Inception", + "year": 2010, + "ids": { + "trakt": 16662, + "slug": "inception-2010", + "imdb": "tt1375666", + "tmdb": 27205 + } + } + }, + { + "plays": 1, + "last_watched_at": "2023-06-16T18:22:13.000Z", + "movie": { + "title": "The Matrix", + "year": 1999, + "ids": { + "trakt": 481, + "slug": "the-matrix-1999", + "imdb": "tt0133093", + "tmdb": 603 + } + } + } +] +EOF + + # Create a test history file + cat > "${TEST_TEMP_DIR}/test_history.json" << EOF +[ + { + "id": 123456789, + "watched_at": "2023-06-20T19:30:15.000Z", + "action": "watch", + "type": "movie", + "movie": { + "title": "Inception", + "year": 2010, + "ids": { + "trakt": 16662, + "slug": "inception-2010", + "imdb": "tt1375666", + "tmdb": 27205 + } + } + }, + { + "id": 123456790, + "watched_at": "2023-06-19T20:15:30.000Z", + "action": "watch", + "type": "movie", + "movie": { + "title": "The Matrix", + "year": 1999, + "ids": { + "trakt": 481, + "slug": "the-matrix-1999", + "imdb": "tt0133093", + "tmdb": 603 + } + } + } +] +EOF +} + +# Test for create_ratings_lookup function +@test "create_ratings_lookup should create a lookup JSON from ratings" { + # Setup test data + setup_test_data + + # Files for the test + local ratings_file="${TEST_TEMP_DIR}/test_ratings.json" + local output_file="${TEST_TEMP_DIR}/ratings_lookup.json" + local log_file="${TEST_TEMP_DIR}/test.log" + touch "$log_file" + + # Run the function + run create_ratings_lookup "$ratings_file" "$output_file" "$log_file" + + # Check the return code + assert_success + + # Check if the output file exists + assert_file_exists "$output_file" + + # Check if it contains the expected data + run jq -r '.["16662"]' "$output_file" + assert_output "8" + + run jq -r '.["231"]' "$output_file" + assert_output "9" +} + +# Test for create_ratings_lookup with missing file +@test "create_ratings_lookup should create empty lookup when file is missing" { + # Files for the test + local ratings_file="${TEST_TEMP_DIR}/missing_file.json" + local output_file="${TEST_TEMP_DIR}/empty_ratings_lookup.json" + local log_file="${TEST_TEMP_DIR}/test.log" + touch "$log_file" + + # Run the function + run create_ratings_lookup "$ratings_file" "$output_file" "$log_file" + + # Should return error code + assert_failure + + # Check if the output file exists with empty JSON + assert_file_exists "$output_file" + + # Check if it contains empty JSON + run cat "$output_file" + assert_output "{}" +} + +# Test for create_plays_count_lookup function +@test "create_plays_count_lookup should create a lookup JSON from watched" { + # Setup test data + setup_test_data + + # Files for the test + local watched_file="${TEST_TEMP_DIR}/test_watched.json" + local output_file="${TEST_TEMP_DIR}/plays_lookup.json" + local log_file="${TEST_TEMP_DIR}/test.log" + touch "$log_file" + + # Run the function + run create_plays_count_lookup "$watched_file" "$output_file" "$log_file" + + # Check the return code + assert_success + + # Check if the output file exists + assert_file_exists 
"$output_file" + + # Check if it contains the expected data + run jq -r '.["tt1375666"]' "$output_file" + assert_output "2" + + run jq -r '.["tt0133093"]' "$output_file" + assert_output "1" +} + +# Test for create_plays_count_lookup with missing file +@test "create_plays_count_lookup should create empty lookup when file is missing" { + # Files for the test + local watched_file="${TEST_TEMP_DIR}/missing_file.json" + local output_file="${TEST_TEMP_DIR}/empty_plays_lookup.json" + local log_file="${TEST_TEMP_DIR}/test.log" + touch "$log_file" + + # Run the function + run create_plays_count_lookup "$watched_file" "$output_file" "$log_file" + + # Should return error code + assert_failure + + # Check if the output file exists with empty JSON + assert_file_exists "$output_file" + + # Check if it contains empty JSON + run cat "$output_file" + assert_output "{}" +} \ No newline at end of file diff --git a/tests/unit/trakt_api_test.bats b/tests/unit/trakt_api_test.bats new file mode 100644 index 0000000..f6bf8af --- /dev/null +++ b/tests/unit/trakt_api_test.bats @@ -0,0 +1,103 @@ +#!/usr/bin/env bats + +# Load the testing helper +load '../test_helper' + +# Load the API mock functions +source "${TESTS_DIR}/mocks/trakt_api_mock.sh" + +# Test for API mocking of get_trakt_ratings +@test "get_trakt_ratings should return ratings from mock data" { + # Set up test mode and output file + export TEST_MODE="true" + local output_file="${TEST_TEMP_DIR}/test_ratings.json" + + # Call function + get_trakt_ratings "movies" "$output_file" + + # Verify the file was created with mock data + assert_file_exists "$output_file" + + # Check if the file contains expected data + run jq -r '.[0].movie.title' "$output_file" + assert_output "Inception" + + run jq -r '.[1].movie.title' "$output_file" + assert_output "The Shawshank Redemption" + + run jq -r '.[2].movie.title' "$output_file" + assert_output "The Dark Knight" +} + +# Test for API mocking of get_trakt_history +@test "get_trakt_history should return history from mock data" { + # Set up test mode and output file + export TEST_MODE="true" + local output_file="${TEST_TEMP_DIR}/test_history.json" + + # Call function + get_trakt_history "2023-01-01" "$output_file" + + # Verify the file was created with mock data + assert_file_exists "$output_file" + + # Check if the file contains expected data + run jq -r '.[0].movie.title' "$output_file" + assert_output "Inception" + + run jq -r '.[1].movie.title' "$output_file" + assert_output "The Matrix" + + run jq -r '.[2].movie.title' "$output_file" + assert_output "Pulp Fiction" +} + +# Test for API mocking of get_trakt_watchlist +@test "get_trakt_watchlist should return watchlist from mock data" { + # Set up test mode and output file + export TEST_MODE="true" + local output_file="${TEST_TEMP_DIR}/test_watchlist.json" + + # Call function + get_trakt_watchlist "movies" "$output_file" + + # Verify the file was created with mock data + assert_file_exists "$output_file" + + # Check if the file contains expected data + run jq -r '.[0].movie.title' "$output_file" + assert_output "Dune" + + run jq -r '.[1].movie.title' "$output_file" + assert_output "Oppenheimer" + + run jq -r '.[2].movie.title' "$output_file" + assert_output "The Batman" +} + +# Test for check_token_validity +@test "check_token_validity should return success in test mode" { + # Set up test mode + export TEST_MODE="true" + + # Call function + run check_token_validity + + # It should return success (0) in test mode + assert_success +} + +# Test for refresh_token +@test 
"refresh_token should return success message in test mode" { + # Set up test mode + export TEST_MODE="true" + + # Call function + run refresh_token + + # It should return success (0) in test mode + assert_success + + # It should output the mock message + assert_output "Token refreshed (mock)" +} \ No newline at end of file diff --git a/wiki b/wiki index c66b5c3..60e8f00 160000 --- a/wiki +++ b/wiki @@ -1 +1 @@ -Subproject commit c66b5c3d9962a54bad4677fa9199939a53795909 +Subproject commit 60e8f0043bd44ca77d1dd52706cc0911116b5d8e