diff --git a/.github/release-drafter-pre-processing.yml b/.github/release-drafter-pre-processing.yml new file mode 100644 index 00000000..e9feffac --- /dev/null +++ b/.github/release-drafter-pre-processing.yml @@ -0,0 +1,44 @@ +name-template: 'pre-processing-v$RESOLVED_VERSION' +tag-template: 'pre-processing-v$RESOLVED_VERSION' + +categories: + - title: '๐Ÿš€ Features' + labels: + - 'enhancement' + - 'pre-processing' + - title: '๐Ÿ› Bug Fixes' + labels: + - 'bug' + - 'pre-processing' + - title: '๐Ÿงฐ Maintenance' + labels: + - 'maintenance' + - 'pre-processing' + +change-template: '- $TITLE @$AUTHOR (#$NUMBER)' +change-title-escapes: '\<*_&' + +version-resolver: + major: + labels: + - 'major' + - 'breaking-change' + - 'pre-processing' + minor: + labels: + - 'minor' + - 'enhancement' + - 'pre-processing' + patch: + labels: + - 'patch' + - 'bug' + - 'pre-processing' + default: patch + +template: | + ## What's Changed + + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...pre-processing-v$RESOLVED_VERSION diff --git a/.github/release-drafter-user-service.yml b/.github/release-drafter-user-service.yml new file mode 100644 index 00000000..7259f4b4 --- /dev/null +++ b/.github/release-drafter-user-service.yml @@ -0,0 +1,44 @@ +name-template: 'user-service-v$RESOLVED_VERSION' +tag-template: 'user-service-v$RESOLVED_VERSION' + +categories: + - title: '๐Ÿš€ Features' + labels: + - 'enhancement' + - 'user-service' + - title: '๐Ÿ› Bug Fixes' + labels: + - 'bug' + - 'user-service' + - title: '๐Ÿงฐ Maintenance' + labels: + - 'maintenance' + - 'user-service' + +change-template: '- $TITLE @$AUTHOR (#$NUMBER)' +change-title-escapes: '\<*_&' + +version-resolver: + major: + labels: + - 'major' + - 'breaking-change' + - 'user-service' + minor: + labels: + - 'minor' + - 'enhancement' + - 'user-service' + patch: + labels: + - 'patch' + - 'bug' + - 'user-service' + default: patch + +template: | + ## What's Changed + + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...user-service-v$RESOLVED_VERSION \ No newline at end of file diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml deleted file mode 100644 index d7b5b424..00000000 --- a/.github/release-drafter.yml +++ /dev/null @@ -1,25 +0,0 @@ -name-template: 'v$RESOLVED_VERSION' -tag-template: 'v$RESOLVED_VERSION' -categories: - - title: 'Feature' - labels: - - 'enhancement' - - title: 'Bug Fixes' - labels: - - 'bug' -change-template: '- $TITLE @$AUTHOR (#$NUMBER)' -change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks. 
-version-resolver: - major: - labels: - - 'major' - minor: - labels: - - 'minor' - patch: - labels: - - 'patch' - default: patch -template: | - ## Changes - $CHANGES \ No newline at end of file diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 3f1a3b76..a0a7ebbb 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -2,8 +2,8 @@ name: CI (Java) on: push: - branches: - - main + tags: + - 'user-service-v*' paths: - "apps/user-service/**" pull_request: @@ -21,10 +21,12 @@ permissions: security-events: write checks: write pull-requests: write + pages: write # GitHub Pages ๋ฐฐํฌ + id-token: write # GitHub Pages ๋ฐฐํฌ jobs: spotless-check: - if: github.event.pull_request.draft == false + if: github.event_name == 'pull_request' && github.event.pull_request.draft == false name: Lint Check runs-on: ubuntu-latest @@ -53,7 +55,7 @@ jobs: needs: spotless-check strategy: matrix: - java-version: [ "21" ] + java-version: ["21"] steps: - name: Checkout repository @@ -76,26 +78,44 @@ jobs: - name: Run Tests run: | - if [ "${{ github.base_ref }}" == "main" ]; then - ./gradlew allTests - else - ./gradlew test + ./gradlew unitTest + ./gradlew integrationTest + if [ "${{ github.base_ref }}" = "main" ]; then + ./gradlew e2eTest fi working-directory: apps/user-service - name: Upload build artifacts - if: matrix.java-version == '21' && github.ref == 'refs/heads/main' && github.event_name == 'push' + if: matrix.java-version == '21' && github.ref == 'refs/heads/main' uses: actions/upload-artifact@v4 with: - name: build-artifacts + name: build-artifacts-${{ github.run_id }}-${{ github.run_attempt }} path: apps/user-service/build/libs/ + - name: Upload OpenAPI spec artifacts + if: matrix.java-version == '21' && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@v4 + with: + name: openapi-spec-${{ github.run_id }}-${{ github.run_attempt }} + path: apps/user-service/build/api-spec/ + + set-image-tag: + name: Set IMAGE_TAG + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/user-service-v') + steps: + - name: Extract version from tag + run: | + IMAGE_TAG="${GITHUB_REF#refs/tags/user-service-}" + echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + docker: - name: Build Spring Boot Docker Image and push to registry + name: Build Spring Boot Docker Image and push runs-on: ubuntu-latest - if: github.ref == 'refs/heads/main' && github.event_name == 'push' needs: - build + - set-image-tag + if: startsWith(github.ref, 'refs/tags/user-service-v') steps: - name: Checkout repository @@ -104,7 +124,7 @@ jobs: - name: Download build artifacts (JAR) uses: actions/download-artifact@v4 with: - name: build-artifacts + name: build-artifacts-${{ github.run_id }}-${{ github.run_attempt }} path: apps/user-service/build/libs/ - name: Login to Docker Registry @@ -123,10 +143,41 @@ jobs: context: ./apps/user-service push: true tags: | + ghcr.io/${{ env.REPO_LC }}/user-service:${{ env.IMAGE_TAG }} ghcr.io/${{ env.REPO_LC }}/user-service:latest - ghcr.io/${{ env.REPO_LC }}/user-service:${{ github.sha }} - name: Analyze image layers run: | echo "=== Image Layer Analysis ===" - docker history ghcr.io/${{ env.REPO_LC }}/user-service:latest --human --no-trunc \ No newline at end of file + docker history ghcr.io/${{ env.REPO_LC }}/user-service:${{ env.IMAGE_TAG }} --human --no-trunc + + swagger-docs: + name: Deploy Swagger Documentation + runs-on: ubuntu-latest + needs: + - build + - set-image-tag + if: startsWith(github.ref, 'refs/tags/user-service-v') + + steps: + - 
name: Checkout repository + uses: actions/checkout@v4 + + - name: Download OpenAPI spec artifacts + uses: actions/download-artifact@v4 + with: + name: openapi-spec-${{ github.run_id }}-${{ github.run_attempt }} + path: ./openapi-spec + + - name: Generate Swagger UI + uses: Legion2/swagger-ui-action@v1 + with: + output: user-service-swagger-ui-${{ env.IMAGE_TAG }} + spec-file: openapi-spec/openapi3.yaml + + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./user-service-swagger-ui-${{ env.IMAGE_TAG }} + destination_dir: user-service/${{ env.IMAGE_TAG }} \ No newline at end of file diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml new file mode 100644 index 00000000..2ffa9661 --- /dev/null +++ b/.github/workflows/ci-python.yml @@ -0,0 +1,151 @@ +name: CI (Python/FastAPI) + +on: + push: + tags: + - 'pre-processing-v*' + paths: + - "apps/pre-processing-service/**" + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches: + - main + - develop + - release/** + paths: + - "apps/pre-processing-service/**" + +permissions: + contents: read + packages: write + security-events: write + checks: write + pull-requests: write + +jobs: + lint: + if: github.event_name == 'pull_request' && github.event.pull_request.draft == false + name: Lint & Format Check + runs-on: ubuntu-latest + defaults: + run: + working-directory: apps/pre-processing-service + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v4 + with: + path: apps/pre-processing-service/.venv + key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} + + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + - name: Run Formatter Check (Black) + run: poetry run black --check . 
+ + test: + name: Run Tests + runs-on: ubuntu-latest + needs: lint + defaults: + run: + working-directory: apps/pre-processing-service + steps: + - name: Checkout repository + uses: actions/checkout@v4 + +# - name: Set up Python 3.11 +# uses: actions/setup-python@v5 +# with: +# python-version: "3.11" +# +# - name: Install Poetry +# uses: snok/install-poetry@v1 +# with: +# virtualenvs-create: true +# virtualenvs-in-project: true +# installer-parallel: true +# +# - name: Load cached venv +# id: cached-poetry-dependencies +# uses: actions/cache@v4 +# with: +# path: apps/pre-processing-service/.venv +# key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} +# +# - name: Install dependencies +# if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' +# run: poetry install --no-interaction --no-root +# +# - name: Run tests with Pytest +# env: +# DB_HOST: localhost +# DB_PORT: 3306 +# DB_USER: test_user +# DB_PASS: test_pass +# DB_NAME: test_db +# ENV_NAME: test +# run: poetry run pytest + + set-image-tag: + name: Set IMAGE_TAG + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/pre-processing-v') + steps: + - name: Extract version from tag + run: | + IMAGE_TAG="${GITHUB_REF#refs/tags/pre-processing-}" + echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + + build-and-push-docker: + name: Build Docker Image and push + runs-on: ubuntu-latest + needs: + - test + - set-image-tag + if: startsWith(github.ref, 'refs/tags/pre-processing-v') + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Login to Docker Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set repo lowercase + run: echo "REPO_LC=${GITHUB_REPOSITORY,,}" >> $GITHUB_ENV + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: ./apps/pre-processing-service + push: true + tags: | + ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ env.IMAGE_TAG }} + ghcr.io/${{ env.REPO_LC }}/pre-processing-service:latest + + - name: Analyze image layers + run: | + echo "=== Image Layer Analysis ===" + docker history ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ env.IMAGE_TAG }} --human --no-trunc diff --git a/.github/workflows/deploy-java.yml b/.github/workflows/deploy-java.yml index e8b35476..69a1909d 100644 --- a/.github/workflows/deploy-java.yml +++ b/.github/workflows/deploy-java.yml @@ -14,6 +14,15 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Create env file + run: | + echo "DB_HOST=${{ secrets.DB_HOST }}" > .env.prod + echo "DB_PORT=${{ secrets.DB_PORT }}" >> .env.prod + echo "DB_USER=${{ secrets.DB_USER }}" >> .env.prod + echo "DB_PASS=${{ secrets.DB_PASS }}" >> .env.prod + echo "DB_NAME=${{ secrets.DB_NAME }}" >> .env.prod + echo "ENV_NAME=${{ secrets.ENV_NAME }}" >> .env.prod + - name: Set repo lowercase run: echo "REPO_LC=${GITHUB_REPOSITORY,,}" >> $GITHUB_ENV @@ -26,15 +35,29 @@ jobs: source: "docker/production/docker-compose.yml" target: "~/app" + - name: Copy .env.prod file to EC2 + uses: appleboy/scp-action@v0.1.7 + with: + host: ${{ secrets.SERVER_HOST }} + username: ubuntu + key: ${{ secrets.SERVER_SSH_KEY }} + source: ".env.prod" + target: "~/app/docker/production/" + overwrite: true + - name: Deploy on EC2 uses: appleboy/ssh-action@v1.0.3 with: host: ${{ secrets.SERVER_HOST }} -# username: ${{ secrets.SERVER_USER }} username: ubuntu key: ${{ secrets.SERVER_SSH_KEY }} script: | cd ~/app/docker/production + + 
# Remove existing .env.prod if exists and move new one + rm -f .env.prod + mv .env.prod.bak .env.prod 2>/dev/null || mv .env.prod .env.prod + echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin docker pull ghcr.io/${{ env.REPO_LC }}/user-service:latest @@ -71,4 +94,4 @@ jobs: **Repository:** ${{ env.REPO_LC }} **Tag:** ${{ github.ref_name }} **Error:** ๋ฐฐํฌ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค. - **Check:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + **Check:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} \ No newline at end of file diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 5adb153f..a29ba80b 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -4,31 +4,35 @@ on: push: branches: - main + - master pull_request: types: [opened, reopened, synchronize] + permissions: contents: read + pull-requests: read jobs: update_release_draft: permissions: - # write permission is required to create a github release contents: write - # write permission is required for autolabeler - # otherwise, read permission is required at least pull-requests: write runs-on: ubuntu-latest steps: - # (Optional) GitHub Enterprise requires GHE_HOST variable set - #- name: Set GHE_HOST - # run: | - # echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV + # Pre-processing service + - uses: release-drafter/release-drafter@v5 + with: + config-name: release-drafter-pre-processing.yml + name: 'pre-processing-v$RESOLVED_VERSION' + tag: 'pre-processing-v$RESOLVED_VERSION' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # Drafts your next Release notes as Pull Requests are merged into "main" + # User service - uses: release-drafter/release-drafter@v5 - # (Optional) specify config name to use, relative to .github/. 
Default: release-drafter.yml - # with: - # config-name: my-config.yml - # disable-autolabeler: true + with: + config-name: release-drafter-user-service.yml + name: 'user-service-v$RESOLVED_VERSION' + tag: 'user-service-v$RESOLVED_VERSION' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/apps/pre-processing-service/.dockerignore b/apps/pre-processing-service/.dockerignore new file mode 100644 index 00000000..51db904a --- /dev/null +++ b/apps/pre-processing-service/.dockerignore @@ -0,0 +1,20 @@ +.git +.gitignore +**/__pycache__/ +**/*.pyc +**/.pytest_cache/ +**/.mypy_cache/ +**/.ruff_cache/ +**/.venv/ +**/node_modules/ +**/dist/ +**/build/ +tests/ +docs/ +scripts/ +.github/ +.env +.env.* +*.log +pytest-report.xml +coverage.xml diff --git a/apps/pre-processing-service/Dockerfile b/apps/pre-processing-service/Dockerfile index 073dea33..69b7cacd 100644 --- a/apps/pre-processing-service/Dockerfile +++ b/apps/pre-processing-service/Dockerfile @@ -1,18 +1,35 @@ +# ---- builder ---- FROM python:3.11-slim AS builder WORKDIR /app + +# ํ•„์ˆ˜ OS ํŒจํ‚ค์ง€ RUN apt-get update && apt-get install -y --no-install-recommends curl \ && rm -rf /var/lib/apt/lists/* + +# Poetry ์„ค์น˜ RUN curl -sSL https://install.python-poetry.org | python3 - ENV PATH="/root/.local/bin:$PATH" -RUN poetry config virtualenvs.create false +RUN poetry self add "poetry-plugin-export>=1.7.0" +# ๋Ÿฐํƒ€์ž„ ๊ฐ€์ƒํ™˜๊ฒฝ +RUN python -m venv /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +# ์˜์กด์„ฑ ํ•ด๊ฒฐ โ†’ requirements๋กœ export โ†’ pip๋กœ ์„ค์น˜(= ๋ฐ˜๋“œ์‹œ /opt/venv์— ์„ค์น˜๋จ) COPY pyproject.toml poetry.lock ./ -RUN poetry install --no-root +RUN poetry export --without dev -f requirements.txt -o requirements.txt \ + && pip install --no-cache-dir -r requirements.txt +# ---- runtime ---- FROM python:3.11-slim AS final WORKDIR /app -# site-packages + ์ฝ˜์†” ์Šคํฌ๋ฆฝํŠธ(gunicorn/uvicorn) ํ•จ๊ป˜ ๋ณต์‚ฌ -COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages -COPY --from=builder /usr/local/bin /usr/local/bin -COPY ./app ./app -EXPOSE 8000 -CMD ["gunicorn", "-w", "2", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000", "app.main:app"] + +# /opt/venv ๋ณต์‚ฌ +COPY --from=builder /opt/venv /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +# ์•ฑ ์†Œ์Šค +COPY . . + + +# (๊ถŒ์žฅ ๋Œ€์•ˆ) ์ฝ”๋“œ์—์„œ uvicorn import ์•ˆ ํ•˜๊ณ  ํ”„๋กœ์„ธ์Šค ๋งค๋‹ˆ์ €๋กœ ์‹คํ–‰ํ•˜๋ ค๋ฉด: +CMD ["gunicorn", "-k", "uvicorn.workers.UvicornWorker", "app.main:app", "-b", "0.0.0.0:8000"] diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 6a771cae..04ae0b14 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -4,64 +4,85 @@ from ...model.schemas import * from app.service.blog.tistory_blog_post_service import TistoryBlogPostService from app.service.blog.naver_blog_post_service import NaverBlogPostService +from ...service.blog.blogger_blog_post_service import BloggerBlogPostService -# ์ด ํŒŒ์ผ๋งŒ์˜ ๋…๋ฆฝ์ ์ธ ๋ผ์šฐํ„ฐ๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค. 
router = APIRouter() -@router.get("/") + +@router.get("/", summary="๋ธ”๋กœ๊ทธ API ์ƒํƒœ ํ™•์ธ") async def root(): return {"message": "blog API"} -@router.post("/rag/create", response_model=ResponseBlogCreate) + +@router.post( + "/rag/create", + response_model=ResponseBlogCreate, + summary="RAG ๊ธฐ๋ฐ˜ ๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ  ์ƒ์„ฑ", +) async def rag_create(request: RequestBlogCreate): """ RAG ๊ธฐ๋ฐ˜ ๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ  ์ƒ์„ฑ """ return {"message": "blog API"} -@router.post("/publish", response_model=ResponseBlogPublish) + +@router.post( + "/publish", + response_model=ResponseBlogPublish, + summary="๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ  ๋ฐฐํฌ (๋„ค์ด๋ฒ„/ํ‹ฐ์Šคํ† ๋ฆฌ/๋ธ”๋กœ๊ฑฐ ์ง€์›)", +) async def publish(request: RequestBlogPublish): """ - ์ƒ์„ฑ๋œ ๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ  ๋ฐฐํฌ - ๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ์™€ ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ๋ฅผ ์ง€์› - ํ˜„์žฌ๋Š” ์ƒ์„ฑ๋œ ์ฝ˜ํ…์ธ ๊ฐ€ ์•„๋‹Œ, ์ž„์˜์˜ ์ œ๋ชฉ,๋‚ด์šฉ,ํƒœ๊ทธ๋ฅผ ๋ฐฐํฌ - :param request: RequestBlogPublish - :return: ResponseBlogPublish + ์ƒ์„ฑ๋œ ๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ ๋ฅผ ๋ฐฐํฌํ•ฉ๋‹ˆ๋‹ค. + ๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ์™€ ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ๋ฅผ ์ง€์›ํ•˜๋ฉฐ, + ํ˜„์žฌ๋Š” ์ƒ์„ฑ๋œ ์ฝ˜ํ…์ธ ๊ฐ€ ์•„๋‹Œ ์ž„์˜์˜ ์ œ๋ชฉ, ๋‚ด์šฉ, ํƒœ๊ทธ๋ฅผ ๋ฐฐํฌํ•ฉ๋‹ˆ๋‹ค. """ - if request.tag == "naver": naver_service = NaverBlogPostService() result = naver_service.post_content( - title=request.title, - content=request.content, - tags=request.tags + title=request.post_title, + content=request.post_content, + tags=request.post_tags, ) if not result: - raise CustomException("๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", status_code=500) + raise CustomException( + "๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", status_code=500 + ) return ResponseBlogPublish( - job_id= 1, - schedule_id= 1, - schedule_his_id= 1, - status="200", - metadata=result + job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result ) - else: + elif request.tag == "tistory": tistory_service = TistoryBlogPostService() result = tistory_service.post_content( - title=request.title, - content=request.content, - tags=request.tags + title=request.post_title, + content=request.post_content, + tags=request.post_tags, + ) + + if not result: + raise CustomException( + "ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", status_code=500 + ) + + return ResponseBlogPublish( + job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result + ) + + elif request.tag == "blogger": + blogger_service = BloggerBlogPostService() + result = blogger_service.post_content( + title=request.post_title, + content=request.post_content, + tags=request.post_tags, ) if not result: - raise CustomException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", status_code=500) + raise CustomException( + "๋ธ”๋กœ๊ฑฐ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", status_code=500 + ) return ResponseBlogPublish( - job_id= 1, - schedule_id= 1, - schedule_his_id= 1, - status="200", - metadata=result - ) \ No newline at end of file + job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result + ) diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index 888ff0a0..2b407d6d 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -1,35 +1,48 @@ -# app/api/endpoints/keywords.py from ...service.keyword_service import keyword_search - from fastapi import APIRouter -from ...errors.CustomException import * +from ...errors.CustomException import * 
from ...model.schemas import RequestNaverSearch, ResponseNaverSearch -# ์ด ํŒŒ์ผ๋งŒ์˜ ๋…๋ฆฝ์ ์ธ ๋ผ์šฐํ„ฐ๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค. router = APIRouter() -@router.get("/") + +@router.get("/", summary="ํ‚ค์›Œ๋“œ API ์ƒํƒœ ํ™•์ธ") async def root(): + """ + ํ‚ค์›Œ๋“œ API๊ฐ€ ์ •์ƒ ๋™์ž‘ํ•˜๋Š”์ง€ ํ™•์ธ + """ return {"message": "keyword API"} -@router.post("/search",response_model=ResponseNaverSearch) + +@router.post( + "/search", response_model=ResponseNaverSearch, summary="๋„ค์ด๋ฒ„ ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰" +) async def search(request: RequestNaverSearch): """ - ์ด ์—”๋“œํฌ์ธํŠธ๋Š” ์•„๋ž˜์™€ ๊ฐ™์€ JSON ์š”์ฒญ์„ ๋ฐ›์Šต๋‹ˆ๋‹ค. - RequestBase์™€ RequestNaverSearch์˜ ๋ชจ๋“  ํ•„๋“œ๋ฅผ ํฌํ•จํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. + ์ด ์—”๋“œํฌ์ธํŠธ๋Š” JSON ์š”์ฒญ์œผ๋กœ ๋„ค์ด๋ฒ„ ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰์„ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค. + + ์š”์ฒญ ์˜ˆ์‹œ: { - "job_id": "job-123", - "schedule_id": "schedule-456", - "schedule_his_id": 789, - "tag": "fastapi", - "category": "tech", - "start_date": "2025-09-01T12:00:00", - "end_date": "2025-09-02T15:00:00" + "job_id": 1, + "schedule_id": 1, + "schedule_his_id": 1, + "tag": "naver", + "category": "50000000", + "start_date": "2025-09-01", + "end_date": "2025-09-02" } """ - response_data= await keyword_search(request) + response_data = await keyword_search(request) return response_data -@router.post("/ssadagu/validate",response_model=ResponseNaverSearch) + +@router.post( + "/ssadagu/validate", + response_model=ResponseNaverSearch, + summary="์‚ฌ๋‹ค๊ตฌ๋ชฐ ํ‚ค์›Œ๋“œ ๊ฒ€์ฆ", +) async def ssadagu_validate(request: RequestNaverSearch): + """ + ์‚ฌ๋‹ค๊ตฌ๋ชฐ ํ‚ค์›Œ๋“œ ๊ฒ€์ฆ ํ…Œ์ŠคํŠธ์šฉ ์—”๋“œํฌ์ธํŠธ + """ return ResponseNaverSearch() diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index 4e8c6682..d3881ed6 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -1,46 +1,102 @@ from fastapi import APIRouter, Request, HTTPException from app.decorators.logging import log_api_call -from ...errors.CustomException import InvalidItemDataException, ItemNotFoundException -from ...service.crawl_service import crawl_product_detail -from ...service.search_service import search_products -from ...service.match_service import match_products -from ...service.similarity_service import select_product_by_similarity +from ...errors.CustomException import ( + InvalidItemDataException, + ItemNotFoundException, + CustomException, +) +from ...service.crawl_service import CrawlService +from ...service.search_service import SearchService +from ...service.match_service import MatchService +from ...service.similarity_service import SimilarityService from ...model.schemas import * router = APIRouter() -@router.get("/") + +@router.get("/", summary="์ƒํ’ˆ API ์ƒํƒœ ํ™•์ธ") async def root(): + """ + ์ƒํ’ˆ API ์„œ๋ฒ„ ์ƒํƒœ ํ™•์ธ์šฉ ์—”๋“œํฌ์ธํŠธ + """ return {"message": "product API"} -@router.post("/search", response_model=ResponseSadaguSearch) + +@router.post("/search", response_model=ResponseSadaguSearch, summary="์ƒํ’ˆ ๊ฒ€์ƒ‰") async def search(request: RequestSadaguSearch): """ - ์ƒํ’ˆ ๊ฒ€์ƒ‰ ์—”๋“œํฌ์ธํŠธ + ์š”์ฒญ๋œ ํ‚ค์›Œ๋“œ๋กœ ์‚ฌ๋‹ค๊ตฌ๋ชฐ ์ƒํ’ˆ์„ ๊ฒ€์ƒ‰ํ•ฉ๋‹ˆ๋‹ค. 
""" - return await search_products(request) + try: + search_service = SearchService() + result = await search_service.search_products(request) -@router.post("/match", response_model=ResponseSadaguMatch) + if not result: + raise CustomException(500, "์ƒํ’ˆ ๊ฒ€์ƒ‰์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", "SEARCH_FAILED") + + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/match", response_model=ResponseSadaguMatch, summary="์ƒํ’ˆ ๋งค์นญ") async def match(request: RequestSadaguMatch): """ - ์ƒํ’ˆ ๋งค์นญ ์—”๋“œํฌ์ธํŠธ + ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ƒํ’ˆ๊ณผ ํ‚ค์›Œ๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ๋งค์นญ์„ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค. """ - return match_products(request) + try: + match_service = MatchService() + result = match_service.match_products(request) + + if not result: + raise CustomException(500, "์ƒํ’ˆ ๋งค์นญ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", "MATCH_FAILED") -@router.post("/similarity", response_model=ResponseSadaguSimilarity) + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post( + "/similarity", response_model=ResponseSadaguSimilarity, summary="์ƒํ’ˆ ์œ ์‚ฌ๋„ ๋ถ„์„" +) async def similarity(request: RequestSadaguSimilarity): """ - ์œ ์‚ฌ๋„ ๋ถ„์„ ์—”๋“œํฌ์ธํŠธ + ๋งค์นญ๋œ ์ƒํ’ˆ๋“ค ์ค‘ ํ‚ค์›Œ๋“œ์™€์˜ ์œ ์‚ฌ๋„๋ฅผ ๊ณ„์‚ฐํ•˜์—ฌ ์ตœ์ ์˜ ์ƒํ’ˆ์„ ์„ ํƒํ•ฉ๋‹ˆ๋‹ค. """ - return select_product_by_similarity(request) + try: + similarity_service = SimilarityService() + result = similarity_service.select_product_by_similarity(request) + + if not result: + raise CustomException( + 500, "์œ ์‚ฌ๋„ ๋ถ„์„์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", "SIMILARITY_FAILED" + ) + + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + -@router.post("/crawl", response_model=ResponseSadaguCrawl) +@router.post( + "/crawl", response_model=ResponseSadaguCrawl, summary="์ƒํ’ˆ ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง" +) async def crawl(request: Request, body: RequestSadaguCrawl): """ - ์ƒํ’ˆ ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง ์—”๋“œํฌ์ธํŠธ + ์ƒํ’ˆ ์ƒ์„ธ ํŽ˜์ด์ง€๋ฅผ ํฌ๋กค๋งํ•˜์—ฌ ์ƒ์„ธ ์ •๋ณด๋ฅผ ์ˆ˜์ง‘ํ•ฉ๋‹ˆ๋‹ค. 
""" try: - result = await crawl_product_detail(body) + crawl_service = CrawlService() + result = await crawl_service.crawl_product_detail(body) + + if not result: + raise CustomException(500, "์ƒํ’ˆ ํฌ๋กค๋ง์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", "CRAWL_FAILED") + return result except InvalidItemDataException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py index 2a33591e..6ed44d08 100644 --- a/apps/pre-processing-service/app/api/endpoints/test.py +++ b/apps/pre-processing-service/app/api/endpoints/test.py @@ -1,25 +1,38 @@ # app/api/endpoints/embedding.py +import loguru from fastapi import APIRouter +from sqlalchemy import text + from app.decorators.logging import log_api_call from ...errors.CustomException import * from fastapi import APIRouter +from typing import Mapping, Any, Dict +from ...model.schemas import * +from ...service.blog.naver_blog_post_service import NaverBlogPostService +from ...service.blog.tistory_blog_post_service import TistoryBlogPostService +from ...service.keyword_service import keyword_search +from ...service.match_service import MatchService +from ...service.search_service import SearchService +from ...service.similarity_service import SimilarityService +from ...db.db_connecter import engine # โœ… ์šฐ๋ฆฌ๊ฐ€ ๋งŒ๋“  DB ์œ ํ‹ธ ์ž„ํฌํŠธ # ์ด ํŒŒ์ผ๋งŒ์˜ ๋…๋ฆฝ์ ์ธ ๋ผ์šฐํ„ฐ๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค. router = APIRouter() + @router.get("/") async def root(): return {"message": "ํ…Œ์ŠคํŠธ API"} -@router.get("/hello/{name}" , tags=["hello"]) +@router.get("/hello/{name}", tags=["hello"]) # @log_api_call async def say_hello(name: str): return {"message": f"Hello {name}"} # ํŠน์ • ๊ฒฝ๋กœ์—์„œ ์˜๋„์ ์œผ๋กœ ์—๋Ÿฌ ๋ฐœ์ƒ -#์ปค์Šคํ…€์—๋Ÿฌ ํ…Œ์Šคํ„ฐ url +# ์ปค์Šคํ…€์—๋Ÿฌ ํ…Œ์Šคํ„ฐ url @router.get("/error/{item_id}") async def trigger_error(item_id: int): if item_id == 0: @@ -31,5 +44,82 @@ async def trigger_error(item_id: int): if item_id == 500: raise ValueError("์ด๊ฒƒ์€ ํ…Œ์ŠคํŠธ์šฉ ๊ฐ’ ์˜ค๋ฅ˜์ž…๋‹ˆ๋‹ค.") + return {"result": item_id} + + +@router.get("/db-test", tags=["db"]) +async def db_test(): + """๊ฐ„๋‹จํ•œ DB ์—ฐ๊ฒฐ ๋ฐ ์ฟผ๋ฆฌ ํ…Œ์ŠคํŠธ""" + try: + with engine.connect() as conn: + result = conn.execute(text("SELECT NOW() as now")) + row = result.fetchone() + return {"status": "ok", "db_time": str(row.now)} + except Exception as e: + return {"status": "error", "detail": str(e)} + + +def with_meta(data: Mapping[str, Any], meta: Mapping[str, Any]) -> Dict[str, Any]: + """์š”์ฒญ payload + ๊ณตํ†ต meta ๋จธ์ง€""" + return {**meta, **data} + + +@router.get("/tester", response_model=None) +async def processing_tester(): + meta = { + "job_id": 1, + "schedule_id": 1, + "schedule_his_id": 1, # โœ… ํƒ€์ดํฌ ์ˆ˜์ • + } + request_dict = { + "tag": "naver", + "category": "50000000", + "start_date": "2025-09-01", + "end_date": "2025-09-02", + } + # ๋„ค์ด๋ฒ„ ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰ + naver_request = RequestNaverSearch(**with_meta(meta, request_dict)) + response_data = await keyword_search(naver_request) + keyword = response_data.get("keyword") + loguru.logger.info(keyword) + + keyword = { + "keyword": keyword, + } + + # ์‹ธ๋‹ค๊ตฌ ์ƒํ’ˆ ๊ฒ€์ƒ‰ + sadagu_request = RequestSadaguSearch(**with_meta(meta, keyword)) + search_service = SearchService() + keyword_result = await search_service.search_products(sadagu_request) + loguru.logger.info(keyword_result) + + # ์‹ธ๋‹ค๊ตฌ ์ƒํ’ˆ ๋งค์น˜ + keyword["search_results"] = keyword_result.get("search_results") + 
keyword_match_request = RequestSadaguMatch(**with_meta(meta, keyword)) + match_service = MatchService() + keyword_match_response = match_service.match_products(keyword_match_request) + loguru.logger.info(keyword_match_response) + + # ์‹ธ๋‹ค๊ตฌ ์ƒํ’ˆ ์œ ์‚ฌ๋„ ๋ถ„์„ + keyword["matched_products"] = keyword_match_response.get("matched_products") + keyword_similarity_request = RequestSadaguSimilarity(**with_meta(meta, keyword)) + similarity_service = SimilarityService() + keyword_similarity_response = similarity_service.select_product_by_similarity( + keyword_similarity_request + ) + loguru.logger.info(keyword_similarity_response) + + # ์‹ธ๋‹ค๊ตฌ ์ƒํ’ˆ ํฌ๋กค๋ง + + # ๋ธ”๋กœ๊ทธ ์ƒ์„ฑ + + # ๋ธ”๋กœ๊ทธ ๋ฐฐํฌ + tistory_service = TistoryBlogPostService() + result = tistory_service.post_content( + title="์•ˆ๋…•ํ•˜์‚ด๋ฒ•", + content="์•ˆ๋…•ํ•˜์‚ด๋ฒ• ๋ฐ›์•„์น˜๊ธฐ๋Ÿฌ๊ธฐ ์ฝ”๋“œ ๋ฐ›์•„์น˜๊ธฐ", + tags=["ํ‰ํ‰ํ‰์‚ฌํ›„๋ฅด", "์งœ๋ผ์งœ๋ผ"], + ) + loguru.logger.info(result) - return {"result": item_id} \ No newline at end of file + return "๊ตฌ์›ƒ" diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py index 683f42a7..99286cf6 100644 --- a/apps/pre-processing-service/app/api/router.py +++ b/apps/pre-processing-service/app/api/router.py @@ -11,22 +11,21 @@ # processing API URL api_router.include_router(blog.router, prefix="/blogs", tags=["blog"]) -#์ƒํ’ˆ API URL +# ์ƒํ’ˆ API URL api_router.include_router(product.router, prefix="/products", tags=["product"]) -#๋ชจ๋“ˆ ํ…Œ์Šคํ„ฐ๋ฅผ ์œ„ํ•œ endpoint -> ์ถ”ํ›„ ์‚ญ์ œ ์˜ˆ์ • +# ๋ชจ๋“ˆ ํ…Œ์Šคํ„ฐ๋ฅผ ์œ„ํ•œ endpoint -> ์ถ”ํ›„ ์‚ญ์ œ ์˜ˆ์ • api_router.include_router(test.router, prefix="/tests", tags=["Test"]) -@api_router.get("/") + +@api_router.get("/ping") async def root(): return {"message": "์„œ๋ฒ„ ์‹คํ–‰์ค‘์ž…๋‹ˆ๋‹ค."} + @api_router.get("/db") def get_settings(): """ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๊ฐ€ ์˜ฌ๋ฐ”๋ฅด๊ฒŒ ๋กœ๋“œ๋˜์—ˆ๋Š”์ง€ ํ™•์ธํ•˜๋Š” ์—”๋“œํฌ์ธํŠธ """ - return { - "ํ™˜๊ฒฝ": settings.env_name, - "๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค URL": settings.db_url - } + return {"ํ™˜๊ฒฝ": settings.env_name, "๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค URL": settings.db_url} diff --git a/apps/pre-processing-service/app/core/config.py b/apps/pre-processing-service/app/core/config.py index 52930483..ed54cc69 100644 --- a/apps/pre-processing-service/app/core/config.py +++ b/apps/pre-processing-service/app/core/config.py @@ -11,14 +11,19 @@ def detect_mecab_dicdir() -> Optional[str]: # 1. mecab-config ๋ช…๋ น์–ด๋กœ ์‚ฌ์ „ ๊ฒฝ๋กœ ํ™•์ธ (๊ฐ€์žฅ ์ •ํ™•ํ•œ ๋ฐฉ๋ฒ•) try: - result = subprocess.run(['mecab-config', '--dicdir'], - capture_output=True, text=True, timeout=5) + result = subprocess.run( + ["mecab-config", "--dicdir"], capture_output=True, text=True, timeout=5 + ) if result.returncode == 0: dicdir = result.stdout.strip() if os.path.exists(dicdir): print(f"mecab-config์—์„œ ์‚ฌ์ „ ๊ฒฝ๋กœ ๋ฐœ๊ฒฌ: {dicdir}") return dicdir - except (subprocess.CalledProcessError, FileNotFoundError, subprocess.TimeoutExpired): + except ( + subprocess.CalledProcessError, + FileNotFoundError, + subprocess.TimeoutExpired, + ): pass # 2. 
ํ”Œ๋žซํผ๋ณ„ ์ผ๋ฐ˜์ ์ธ ๊ฒฝ๋กœ๋“ค ํ™•์ธ @@ -29,7 +34,7 @@ def detect_mecab_dicdir() -> Optional[str]: "/opt/homebrew/lib/mecab/dic/mecab-ko-dic", # Apple Silicon "/usr/local/lib/mecab/dic/mecab-ko-dic", # Intel Mac "/opt/homebrew/lib/mecab/dic/mecab-ipadic", # ๊ธฐ๋ณธ ์‚ฌ์ „ - "/usr/local/lib/mecab/dic/mecab-ipadic" + "/usr/local/lib/mecab/dic/mecab-ipadic", ] elif system == "linux": candidate_paths = [ @@ -38,13 +43,13 @@ def detect_mecab_dicdir() -> Optional[str]: "/usr/local/lib/mecab/dic/mecab-ko-dic", "/usr/share/mecab/dic/mecab-ko-dic", "/usr/lib/mecab/dic/mecab-ipadic", - "/usr/local/lib/mecab/dic/mecab-ipadic" + "/usr/local/lib/mecab/dic/mecab-ipadic", ] elif system == "windows": candidate_paths = [ "C:/Program Files/MeCab/dic/mecab-ko-dic", "C:/mecab/dic/mecab-ko-dic", - "C:/Program Files/MeCab/dic/mecab-ipadic" + "C:/Program Files/MeCab/dic/mecab-ipadic", ] else: candidate_paths = [] @@ -60,6 +65,7 @@ def detect_mecab_dicdir() -> Optional[str]: return None + # ๊ณตํ†ต ์„ค์ •์„ ์œ„ํ•œ BaseSettings class BaseSettingsConfig(BaseSettings): @@ -69,17 +75,11 @@ class BaseSettingsConfig(BaseSettings): db_user: str db_pass: str db_name: str - env_name: str = ".dev" + env_name: str # MeCab ์‚ฌ์ „ ๊ฒฝ๋กœ (์ž๋™ ๊ฐ์ง€) mecab_path: Optional[str] = None - # ์™ธ๋ถ€ ์„œ๋น„์Šค ๊ณ„์ • ์ •๋ณด - naver_id: Optional[str] = None - naver_password: Optional[str] = None - tistory_id: Optional[str] = None - tistory_password: Optional[str] = None - def __init__(self, **kwargs): super().__init__(**kwargs) @@ -92,18 +92,23 @@ def __init__(self, **kwargs): @property def db_url(self) -> str: """๊ฐœ๋ณ„ ํ•„๋“œ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ DB URL์„ ๋™์ ์œผ๋กœ ์ƒ์„ฑ""" - return f"postgresql://{self.db_user}:{self.db_pass}@{self.db_host}:{self.db_port}/{self.db_name}" + return ( + f"mysql+pymysql://{self.db_user}:" + f"{self.db_pass}" + f"@{self.db_host}:{self.db_port}/{self.db_name}" + ) - model_config = SettingsConfigDict(env_file=['.env']) + model_config = SettingsConfigDict(env_file=[".env"]) # ํ™˜๊ฒฝ๋ณ„ ์„ค์ • ํด๋ž˜์Šค class DevSettings(BaseSettingsConfig): - model_config = SettingsConfigDict(env_file=['.env', '.dev.env']) + model_config = SettingsConfigDict(env_file=[".env", ".env.dev"]) class PrdSettings(BaseSettingsConfig): - model_config = SettingsConfigDict(env_file=['.env', '.prd.env']) + model_config = SettingsConfigDict(env_file=[".env", ".env.prod"]) + def get_settings() -> BaseSettingsConfig: """ํ™˜๊ฒฝ ๋ณ€์ˆ˜์— ๋”ฐ๋ผ ์ ์ ˆํ•œ ์„ค์ • ๊ฐ์ฒด๋ฅผ ๋ฐ˜ํ™˜ํ•˜๋Š” ํ•จ์ˆ˜""" @@ -116,4 +121,4 @@ def get_settings() -> BaseSettingsConfig: raise ValueError(f"Invalid MODE environment variable: {mode}") -settings = get_settings() \ No newline at end of file +settings = get_settings() diff --git a/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py b/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py index a6152755..c783e8c8 100644 --- a/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py +++ b/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py @@ -44,11 +44,11 @@ def __init__(self): self._pool = None self._config = { - 'host': os.getenv('DB_HOST', '52.79.235.214'), - 'port': int(os.getenv('DB_PORT', 5432)), - 'database': os.getenv('DB_NAME', 'pre_process'), - 'user': os.getenv('DB_USER', 'postgres'), - 'password': os.getenv('DB_PASSWORD', 'qwer1234') + "host": os.getenv("DB_HOST", "52.79.235.214"), + "port": int(os.getenv("DB_PORT", 5432)), + "database": os.getenv("DB_NAME", "pre_process"), + "user": os.getenv("DB_USER", "postgres"), + "password": os.getenv("DB_PASSWORD", 
"qwer1234"), } self._initialized = True @@ -72,9 +72,7 @@ async def init_pool(self, min_size=5, max_size=20): if self._pool is None: self._pool = await asyncpg.create_pool( - min_size=min_size, - max_size=max_size, - **self._config + min_size=min_size, max_size=max_size, **self._config ) return self._pool @@ -182,8 +180,9 @@ async def close_pool(self): self._pool = None print("๋น„๋™๊ธฐ DB ์—ฐ๊ฒฐ ํ’€ ์ „์ฒด ์ข…๋ฃŒ") + """ # ์‚ฌ์šฉ ์˜ˆ์‹œ init_pool() - ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹œ์ž‘ ์‹œ ๋‹จ ํ•œ๋ฒˆ๋งŒ ํ˜ธ์ถœ (main.py์—์„œ ์‹คํ–‰, early startup) -""" \ No newline at end of file +""" diff --git a/apps/pre-processing-service/app/db/MariadbManager.py b/apps/pre-processing-service/app/db/MariadbManager.py deleted file mode 100644 index e69de29b..00000000 diff --git a/apps/pre-processing-service/app/db/PostgreSQLManager.py b/apps/pre-processing-service/app/db/PostgreSQLManager.py index 606f7b5c..ca3ccede 100644 --- a/apps/pre-processing-service/app/db/PostgreSQLManager.py +++ b/apps/pre-processing-service/app/db/PostgreSQLManager.py @@ -5,6 +5,7 @@ import os import threading + class PostgreSQLManager: """ PostgreSQL ๋งค๋‹ˆ์ € ํด๋ž˜์Šค @@ -42,11 +43,11 @@ def __init__(self): self._pool = None self._config = { - 'host': os.getenv('DB_HOST', '52.79.235.214'), - 'port': int(os.getenv('DB_PORT', '5432')), - 'database': os.getenv('DB_NAME', 'pre_process'), - 'user': os.getenv('DB_USER', 'postgres'), - 'password': os.getenv('DB_PASSWORD', 'qwer1234') + "host": os.getenv("DB_HOST", "52.79.235.214"), + "port": int(os.getenv("DB_PORT", "5432")), + "database": os.getenv("DB_NAME", "pre_process"), + "user": os.getenv("DB_USER", "postgres"), + "password": os.getenv("DB_PASSWORD", "qwer1234"), } self._initialized = True @@ -130,6 +131,7 @@ def close_pool(self): self._pool = None print("DB ์—ฐ๊ฒฐ ํ’€ ์ „์ฒด ์ข…๋ฃŒ") + """ # get_cursor ์‚ฌ์šฉ ์˜ˆ์‹œ : ๋ฆฌ์†Œ์Šค ์ž๋™ ์ •๋ฆฌ try: @@ -139,4 +141,4 @@ def close_pool(self): except Exception as e: print(f"์—๋Ÿฌ ๋ฐœ์ƒ: {e}") # ์ž๋™์œผ๋กœ ๋กค๋ฐฑ, ์ปค์„œ ๋‹ซ๊ธฐ, ์ปค๋„ฅ์…˜ ๋ฐ˜ํ™˜ ์ˆ˜ํ–‰ -""" \ No newline at end of file +""" diff --git a/apps/pre-processing-service/app/db/db_connecter.py b/apps/pre-processing-service/app/db/db_connecter.py index 0ed48b04..027d924d 100644 --- a/apps/pre-processing-service/app/db/db_connecter.py +++ b/apps/pre-processing-service/app/db/db_connecter.py @@ -1 +1,8 @@ -from ..core.config import settings \ No newline at end of file +from ..core.config import settings +from sqlalchemy import create_engine, text +from app.core.config import settings + +engine = create_engine( + settings.db_url, + pool_pre_ping=True, # ์—ฐ๊ฒฐ ์œ ํšจ์„ฑ ์ฒดํฌ +) diff --git a/apps/pre-processing-service/app/db/mariadb_manager.py b/apps/pre-processing-service/app/db/mariadb_manager.py new file mode 100644 index 00000000..63288b13 --- /dev/null +++ b/apps/pre-processing-service/app/db/mariadb_manager.py @@ -0,0 +1,153 @@ +import pymysql +import os +import threading + +from contextlib import contextmanager +from dotenv import load_dotenv +from dbutils.pooled_db import PooledDB + + +class MariadbManager: + """ + MariaDB ๋งค๋‹ˆ์ € ํด๋ž˜์Šค + 1. MariaDB ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์—ฐ๊ฒฐ ๋ฐ ๊ด€๋ฆฌ + 2. ์ปค๋„ฅ์…˜ ํ’€๋ง ์ง€์› + 3. 
์ปค์„œ ๋ฐ ์ปค๋„ฅ์…˜ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ์ œ๊ณต + """ + + _instance = None + _lock = threading.Lock() + load_dotenv() + + def __new__(cls): + """ + ์‹ฑ๊ธ€ํ†ค ํŒจํ„ด ๊ตฌํ˜„ + ์Šค๋ ˆ๋“œ ์•ˆ์ „์„ฑ์„ ์œ„ํ•ด Lock ์‚ฌ์šฉ + Double-checked locking ์ ์šฉ + """ + + if cls._instance is None: + with cls._lock: + if cls._instance is None: + cls._instance = super(MariadbManager, cls).__new__(cls) + cls._instance._initialized = False + return cls._instance + + def __init__(self): + """ + MariaDB ๋งค๋‹ˆ์ € ์ดˆ๊ธฐํ™” + ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์—ฐ๊ฒฐ ์„ค์ • + ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์„ค์ • ๋กœ๋“œ ๋ฐ ๊ฒ€์ฆ + """ + + if self._initialized: + return + + self._config = { + "host": os.getenv("DB_HOST", "localhost"), + "port": int(os.getenv("DB_PORT", "3306")), + "database": os.getenv("DB_NAME", "pre_process"), + "user": os.getenv("DB_USER", "mariadb"), + "password": os.getenv("DB_PASSWORD", "qwer1234"), + "autocommit": False, + } + + required_keys = ["host", "database", "user", "password"] + missing = [ + k + for k, v in self._config.items() + if k in required_keys and (v is None or v == "") + ] + if missing: + raise ValueError(f"ํ•„์ˆ˜ ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์„ค์ •์ด ๋ˆ„๋ฝ๋˜์—ˆ์Šต๋‹ˆ๋‹ค: {missing}") + + self._pool = None + self._initialized = True + + def _init_pool(self, pool_size=20): + """ + MariaDB ์ „์šฉ ์ปค๋„ฅ์…˜ ํ’€ ์ดˆ๊ธฐํ™” + :param pool_size: ํ’€ ํฌ๊ธฐ + """ + + if self._pool is None: + config = {**self._config} + try: + self._pool = PooledDB( + creator=pymysql, + maxconnections=pool_size, + mincached=2, + maxcached=5, + maxshared=3, + blocking=True, + maxusage=None, + setsession=[], + ping=0, + **config, + ) + except pymysql.Error as e: + raise Exception(f"MariaDB ์ปค๋„ฅ์…˜ ํ’€ ์ดˆ๊ธฐํ™” ์‹คํŒจ: {e}") + + @contextmanager + def get_cursor(self): + """ + ์ปค์„œ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € - ์ผ๋ฐ˜์ ์ธ ์ฟผ๋ฆฌ์šฉ + :return: ์ปค์„œ ๊ฐ์ฒด + """ + + if self._pool is None: + self._init_pool() + + try: + conn = self._pool.connection() + except Exception as e: + raise Exception(f"์ปค๋„ฅ์…˜ ํ’€์—์„œ ์—ฐ๊ฒฐ ํš๋“ ์‹คํŒจ: {e}") + + cursor = None + try: + cursor = conn.cursor() + yield cursor + conn.commit() + except Exception as e: + if conn: + conn.rollback() + raise e + finally: + if cursor: + cursor.close() + if conn: + conn.close() + + @contextmanager + def get_connection(self): + """ + ์ปค๋„ฅ์…˜ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € + :return: ์ปค๋„ฅ์…˜ ๊ฐ์ฒด + """ + + if self._pool is None: + self._init_pool() + + try: + conn = self._pool.connection() + except Exception as e: + raise Exception(f"์ปค๋„ฅ์…˜ ํ’€์—์„œ ์—ฐ๊ฒฐ ํš๋“ ์‹คํŒจ: {e}") + + try: + yield conn + conn.commit() + except Exception as e: + if conn: + conn.rollback() + raise e + finally: + if conn: + conn.close() + + def close_pool(self): + """ + ํ’€ ์ข…๋ฃŒ + """ + if self._pool: + self._pool.close() + self._pool = None diff --git a/apps/pre-processing-service/app/decorators/logging.py b/apps/pre-processing-service/app/decorators/logging.py index 145cb0a0..23604a73 100644 --- a/apps/pre-processing-service/app/decorators/logging.py +++ b/apps/pre-processing-service/app/decorators/logging.py @@ -16,7 +16,7 @@ def log_api_call(func): async def wrapper(*args, **kwargs): # 1. request ๊ฐ์ฒด๋ฅผ ์•ˆ์ „ํ•˜๊ฒŒ ๊ฐ€์ ธ์˜ต๋‹ˆ๋‹ค. # kwargs์—์„œ 'request'๋ฅผ ์ฐพ๊ณ , ์—†์œผ๋ฉด args๊ฐ€ ๋น„์–ด์žˆ์ง€ ์•Š์€ ๊ฒฝ์šฐ์—๋งŒ args[0]์„ ์‹œ๋„ํ•ฉ๋‹ˆ๋‹ค. 
- request: Request | None = kwargs.get('request') + request: Request | None = kwargs.get("request") if request is None and args and isinstance(args[0], Request): request = args[0] @@ -28,19 +28,17 @@ async def wrapper(*args, **kwargs): user_agent = request.headers.get("user-agent", "N/A") # 3. ์š”์ฒญ ์ •๋ณด๋ฅผ ๋กœ๊ทธ๋กœ ๊ธฐ๋กํ•ฉ๋‹ˆ๋‹ค. - log_context = { - "func": func.__name__, - "ip": client_ip, - "user_agent": user_agent - } + log_context = {"func": func.__name__, "ip": client_ip, "user_agent": user_agent} if request: - log_context.update({ - "url": str(request.url), - "method": request.method, - }) + log_context.update( + { + "url": str(request.url), + "method": request.method, + } + ) logger.info( "API ํ˜ธ์ถœ ์‹œ์ž‘: URL='{url}' ๋ฉ”์„œ๋“œ='{method}' ํ•จ์ˆ˜='{func}' IP='{ip}' User-Agent='{user_agent}'", - **log_context + **log_context, ) else: logger.info("API ํ˜ธ์ถœ ์‹œ์ž‘: ํ•จ์ˆ˜='{func}'", **log_context) @@ -61,12 +59,12 @@ async def wrapper(*args, **kwargs): if request: logger.error( "API ํ˜ธ์ถœ ์‹คํŒจ: URL='{url}' ๋ฉ”์„œ๋“œ='{method}' IP='{ip}' ์˜ˆ์™ธ='{exception}' ({elapsed})", - **log_context + **log_context, ) else: logger.error( "API ํ˜ธ์ถœ ์‹คํŒจ: ํ•จ์ˆ˜='{func}' ์˜ˆ์™ธ='{exception}' ({elapsed})", - **log_context + **log_context, ) raise # ์˜ˆ์™ธ๋ฅผ ๋‹ค์‹œ ๋ฐœ์ƒ์‹œ์ผœ FastAPI๊ฐ€ ์ฒ˜๋ฆฌํ•˜๋„๋ก ํ•ฉ๋‹ˆ๋‹ค. finally: @@ -77,12 +75,11 @@ async def wrapper(*args, **kwargs): if request: logger.success( "API ํ˜ธ์ถœ ์„ฑ๊ณต: URL='{url}' ๋ฉ”์„œ๋“œ='{method}' IP='{ip}' ({elapsed})", - **log_context + **log_context, ) else: logger.success( - "API ํ˜ธ์ถœ ์„ฑ๊ณต: ํ•จ์ˆ˜='{func}' ({elapsed})", - **log_context + "API ํ˜ธ์ถœ ์„ฑ๊ณต: ํ•จ์ˆ˜='{func}' ({elapsed})", **log_context ) - return wrapper \ No newline at end of file + return wrapper diff --git a/apps/pre-processing-service/app/errors/BlogPostingException.py b/apps/pre-processing-service/app/errors/BlogPostingException.py index d0b360a8..f0d49484 100644 --- a/apps/pre-processing-service/app/errors/BlogPostingException.py +++ b/apps/pre-processing-service/app/errors/BlogPostingException.py @@ -1,79 +1,110 @@ from app.errors.CustomException import CustomException from typing import List, Optional + class BlogLoginException(CustomException): """ ๋ธ”๋กœ๊ทธ ๋กœ๊ทธ์ธ ์‹คํŒจ ์˜ˆ์™ธ @:param platform: ๋กœ๊ทธ์ธํ•˜๋ ค๋Š” ํ”Œ๋žซํผ (๋„ค์ด๋ฒ„, ํ‹ฐ์Šคํ† ๋ฆฌ ๋“ฑ) @:param reason: ๋กœ๊ทธ์ธ ์‹คํŒจ ์ด์œ  """ + def __init__(self, platform: str, reason: str = "์ธ์ฆ ์ •๋ณด๊ฐ€ ์˜ฌ๋ฐ”๋ฅด์ง€ ์•Š์Šต๋‹ˆ๋‹ค"): super().__init__( status_code=401, detail=f"{platform} ๋กœ๊ทธ์ธ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค. {reason}", - code="BLOG_LOGIN_FAILED" + code="BLOG_LOGIN_FAILED", ) + class BlogPostPublishException(CustomException): """ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŠธ ๋ฐœํ–‰ ์‹คํŒจ ์˜ˆ์™ธ @:param platform: ๋ฐœํ–‰ํ•˜๋ ค๋Š” ํ”Œ๋žซํผ @:param reason: ๋ฐœํ–‰ ์‹คํŒจ ์ด์œ  """ - def __init__(self, platform: str, reason: str = "ํฌ์ŠคํŠธ ๋ฐœํ–‰ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค"): + + def __init__( + self, platform: str, reason: str = "ํฌ์ŠคํŠธ ๋ฐœํ–‰ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค" + ): super().__init__( status_code=422, detail=f"{platform} ํฌ์ŠคํŠธ ๋ฐœํ–‰์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค. 
{reason}", - code="BLOG_POST_PUBLISH_FAILED" + code="BLOG_POST_PUBLISH_FAILED", ) + class BlogContentValidationException(CustomException): """ ๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ  ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ์‹คํŒจ ์˜ˆ์™ธ @:param field: ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ์‹คํŒจํ•œ ํ•„๋“œ @:param reason: ์‹คํŒจ ์ด์œ  """ + def __init__(self, field: str, reason: str): super().__init__( status_code=400, detail=f"์ฝ˜ํ…์ธ  ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ์‹คํŒจ: {field} - {reason}", - code="BLOG_CONTENT_VALIDATION_FAILED" + code="BLOG_CONTENT_VALIDATION_FAILED", ) + class BlogElementInteractionException(CustomException): """ ๋ธ”๋กœ๊ทธ ํŽ˜์ด์ง€ ์š”์†Œ์™€์˜ ์ƒํ˜ธ์ž‘์šฉ ์‹คํŒจ ์˜ˆ์™ธ @:param element: ์ƒํ˜ธ์ž‘์šฉํ•˜๋ ค๋˜ ์š”์†Œ @:param action: ์ˆ˜ํ–‰ํ•˜๋ ค๋˜ ์•ก์…˜ """ + def __init__(self, element: str, action: str): super().__init__( status_code=422, detail=f"๋ธ”๋กœ๊ทธ ํŽ˜์ด์ง€ ์š”์†Œ ์ƒํ˜ธ์ž‘์šฉ ์‹คํŒจ: {element}์—์„œ {action} ์ž‘์—… ์‹คํŒจ", - code="BLOG_ELEMENT_INTERACTION_FAILED" + code="BLOG_ELEMENT_INTERACTION_FAILED", ) + class BlogServiceUnavailableException(CustomException): """ ๋ธ”๋กœ๊ทธ ์„œ๋น„์Šค ์ด์šฉ ๋ถˆ๊ฐ€ ์˜ˆ์™ธ @:param platform: ์ด์šฉ ๋ถˆ๊ฐ€ํ•œ ํ”Œ๋žซํผ @:param reason: ์ด์šฉ ๋ถˆ๊ฐ€ ์ด์œ  """ - def __init__(self, platform: str, reason: str = "์„œ๋น„์Šค๊ฐ€ ์ผ์‹œ์ ์œผ๋กœ ์ด์šฉ ๋ถˆ๊ฐ€ํ•ฉ๋‹ˆ๋‹ค"): + + def __init__( + self, platform: str, reason: str = "์„œ๋น„์Šค๊ฐ€ ์ผ์‹œ์ ์œผ๋กœ ์ด์šฉ ๋ถˆ๊ฐ€ํ•ฉ๋‹ˆ๋‹ค" + ): super().__init__( status_code=503, detail=f"{platform} ์„œ๋น„์Šค ์ด์šฉ ๋ถˆ๊ฐ€: {reason}", - code="BLOG_SERVICE_UNAVAILABLE" + code="BLOG_SERVICE_UNAVAILABLE", ) + class BlogConfigurationException(CustomException): """ ๋ธ”๋กœ๊ทธ ์„œ๋น„์Šค ์„ค์ • ์˜ค๋ฅ˜ ์˜ˆ์™ธ @:param config_item: ์„ค์ • ์˜ค๋ฅ˜ ํ•ญ๋ชฉ """ + def __init__(self, config_item: str): super().__init__( status_code=500, detail=f"๋ธ”๋กœ๊ทธ ์„œ๋น„์Šค ์„ค์ • ์˜ค๋ฅ˜: {config_item}", - code="BLOG_CONFIGURATION_ERROR" - ) \ No newline at end of file + code="BLOG_CONFIGURATION_ERROR", + ) + + +class BloggerApiException(CustomException): + """ + Blogger API ๊ด€๋ จ ์˜ค๋ฅ˜ ์˜ˆ์™ธ + @:param reason: ์‹คํŒจ ์ด์œ  + @:param detail: ์ƒ์„ธ ์˜ค๋ฅ˜ ๋ฉ”์‹œ์ง€ + """ + + def __init__(self, reason: str, detail: str): + super().__init__( + status_code=500, + detail=f"Blogger API ์˜ค๋ฅ˜: {reason} ({detail})", + code="BLOGGER_API_ERROR", + ) diff --git a/apps/pre-processing-service/app/errors/CrawlingException.py b/apps/pre-processing-service/app/errors/CrawlingException.py index 1928e30f..4db0ff43 100644 --- a/apps/pre-processing-service/app/errors/CrawlingException.py +++ b/apps/pre-processing-service/app/errors/CrawlingException.py @@ -1,27 +1,31 @@ from app.errors.CustomException import CustomException from typing import List + class PageLoadTimeoutException(CustomException): """ ํŽ˜์ด์ง€ ๋กœ๋“œ ํƒ€์ž„์•„์›ƒ ์˜ˆ์™ธ @:param url: ๋กœ๋“œํ•˜๋ ค๋Š” ํŽ˜์ด์ง€์˜ URL """ - def __init__(self, url : str): + + def __init__(self, url: str): super().__init__( status_code=408, detail=f"ํŽ˜์ด์ง€ ๋กœ๋“œ๊ฐ€ ์‹œ๊ฐ„ ์ดˆ๊ณผ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. 
URL: {url}", - code="PAGE_LOAD_TIMEOUT" + code="PAGE_LOAD_TIMEOUT", ) + class WebDriverConnectionException(CustomException): """ ์›น ๋“œ๋ผ์ด๋ฒ„ ์—ฐ๊ฒฐ ์‹คํŒจ ์˜ˆ์™ธ """ + def __init__(self): super().__init__( status_code=500, detail="์›น ๋“œ๋ผ์ด๋ฒ„ ์—ฐ๊ฒฐ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", - code="WEBDRIVER_ERROR" + code="WEBDRIVER_ERROR", ) @@ -30,34 +34,38 @@ class ElementNotFoundException(CustomException): ํŠน์ • HTML ์š”์†Œ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†๋Š” ์˜ˆ์™ธ @:param selector: ์ฐพ์œผ๋ ค๋Š” ์š”์†Œ์˜ CSS ์„ ํƒ์ž """ + def __init__(self, selector: str): super().__init__( status_code=404, detail=f"์š”์†Œ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. ์„ ํƒ์ž: {selector}", - code="ELEMENT_NOT_FOUND" + code="ELEMENT_NOT_FOUND", ) + class HtmlParsingException(CustomException): """ HTML ํŒŒ์‹ฑ ์‹คํŒจ ์˜ˆ์™ธ @:param reason: ํŒŒ์‹ฑ ์‹คํŒจ ์ด์œ  """ + def __init__(self, reason: str): super().__init__( status_code=422, detail=f"HTML ํŒŒ์‹ฑ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค. ์ด์œ : {reason}", - code="HTML_PARSING_ERROR" + code="HTML_PARSING_ERROR", ) + class DataExtractionException(CustomException): """ ๋ฐ์ดํ„ฐ ์ถ”์ถœ ์‹คํŒจ ์˜ˆ์™ธ @:param field: ์ถ”์ถœํ•˜๋ ค๋Š” ๋ฐ์ดํ„ฐ ํ•„๋“œ ๋ชฉ๋ก """ + def __init__(self, field: List[str]): super().__init__( status_code=422, detail=f"๋ฐ์ดํ„ฐ ์ถ”์ถœ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค. ํ•„๋“œ: {', '.join(field)}", - code="DATA_EXTRACTION_ERROR" + code="DATA_EXTRACTION_ERROR", ) - diff --git a/apps/pre-processing-service/app/errors/CustomException.py b/apps/pre-processing-service/app/errors/CustomException.py index 4c3f84a3..0ae08734 100644 --- a/apps/pre-processing-service/app/errors/CustomException.py +++ b/apps/pre-processing-service/app/errors/CustomException.py @@ -3,42 +3,49 @@ class CustomException(Exception): """ ๊ฐœ๋ฐœ์ž๊ฐ€ ๋น„์ง€๋‹ˆ์Šค ๋กœ์ง์— ๋งž๊ฒŒ ์˜๋„์ ์œผ๋กœ ์—๋Ÿฌ๋ฅผ ์ •์˜ """ + def __init__(self, status_code: int, detail: str, code: str): self.status_code = status_code self.detail = detail self.code = code + # ๊ตฌ์ฒด์ ์ธ ์ปค์Šคํ…€ ์˜ˆ์™ธ ์ •์˜ class ItemNotFoundException(CustomException): """ ์•„์ดํ…œ์„ ์ฐพ์„์ˆ˜ ์—†๋Š” ์˜ˆ์™ธ @:param item_id: ์ฐพ์„์ˆ˜ ์—†๋Š” ์•„์ดํ…œ์˜ ID """ + def __init__(self, item_id: int): super().__init__( status_code=404, detail=f"{item_id}๋ฅผ ์ฐพ์„์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", - code="ITEM_NOT_FOUND" + code="ITEM_NOT_FOUND", ) + class InvalidItemDataException(CustomException): """ ๋ฐ์ดํ„ฐ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ์‹คํŒจ ์˜ˆ์™ธ """ + def __init__(self): super().__init__( status_code=422, detail="๋ฐ์ดํ„ฐ๊ฐ€ ์œ ํšจํ•˜์ง€์•Š์Šต๋‹ˆ๋‹ค..", - code="INVALID_ITEM_DATA" + code="INVALID_ITEM_DATA", ) + class DatabaseConnectionException(CustomException): """ ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์—ฐ๊ฒฐ ์‹คํŒจ ์˜ˆ์™ธ """ + def __init__(self): super().__init__( status_code=500, detail="๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์—ฐ๊ฒฐ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค.", - code="DATABASE_CONNECTION_ERROR" - ) \ No newline at end of file + code="DATABASE_CONNECTION_ERROR", + ) diff --git a/apps/pre-processing-service/app/errors/handlers.py b/apps/pre-processing-service/app/errors/handlers.py index 1b5caf3d..882a6078 100644 --- a/apps/pre-processing-service/app/errors/handlers.py +++ b/apps/pre-processing-service/app/errors/handlers.py @@ -6,15 +6,18 @@ from .messages import ERROR_MESSAGES, get_error_message from ..errors.CustomException import CustomException + class ErrorBaseModel(BaseModel): """ ๋ชจ๋“  ์—๋Ÿฌ ์‘๋‹ต์˜ ๊ธฐ๋ฐ˜์ด ๋˜๋Š” Pydantic ๋ชจ๋ธ. API์˜ ์—๋Ÿฌ ์‘๋‹ต ํ˜•์‹์„ ํ†ต์ผํ•˜๋Š” ์—ญํ• ์„ ํ•ฉ๋‹ˆ๋‹ค. 
""" + status_code: int detail: str code: str + # CustomException ํ•ธ๋“ค๋Ÿฌ async def custom_exception_handler(request: Request, exc: CustomException): """ @@ -22,9 +25,7 @@ async def custom_exception_handler(request: Request, exc: CustomException): """ # ๋ณ€๊ฒฝ์ : ErrorBaseModel์„ ์‚ฌ์šฉํ•˜์—ฌ ์‘๋‹ต ๋ณธ๋ฌธ ์ƒ์„ฑ error_content = ErrorBaseModel( - status_code=exc.status_code, - detail=exc.detail, - code=exc.code + status_code=exc.status_code, detail=exc.detail, code=exc.code ) return JSONResponse( status_code=exc.status_code, @@ -41,9 +42,7 @@ async def http_exception_handler(request: Request, exc: StarletteHTTPException): # ๋ณ€๊ฒฝ์ : ErrorBaseModel์„ ์‚ฌ์šฉํ•˜์—ฌ ์‘๋‹ต ๋ณธ๋ฌธ ์ƒ์„ฑ error_content = ErrorBaseModel( - status_code=exc.status_code, - detail=message, - code=f"HTTP_{exc.status_code}" + status_code=exc.status_code, detail=message, code=f"HTTP_{exc.status_code}" ) return JSONResponse( status_code=exc.status_code, @@ -60,7 +59,7 @@ async def validation_exception_handler(request: Request, exc: RequestValidationE base_error = ErrorBaseModel( status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=ERROR_MESSAGES[status.HTTP_422_UNPROCESSABLE_ENTITY], - code="VALIDATION_ERROR" + code="VALIDATION_ERROR", ) # ๋ชจ๋ธ์˜ ๋‚ด์šฉ๊ณผ ์ถ”๊ฐ€์ ์ธ 'details' ํ•„๋“œ๋ฅผ ๊ฒฐํ•ฉ @@ -82,7 +81,7 @@ async def unhandled_exception_handler(request: Request, exc: Exception): error_content = ErrorBaseModel( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=ERROR_MESSAGES[status.HTTP_500_INTERNAL_SERVER_ERROR], - code="INTERNAL_SERVER_ERROR" + code="INTERNAL_SERVER_ERROR", ) return JSONResponse( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/apps/pre-processing-service/app/main.py b/apps/pre-processing-service/app/main.py index d13c523d..9865d845 100644 --- a/apps/pre-processing-service/app/main.py +++ b/apps/pre-processing-service/app/main.py @@ -12,11 +12,7 @@ from app.errors.handlers import * # --- FastAPI ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ --- -app = FastAPI( - title="pre-processing-service", - description="", - version="1.0.0" -) +app = FastAPI(title="pre-processing-service", description="", version="1.0.0") # --- ์˜ˆ์™ธ ํ•ธ๋“ค๋Ÿฌ ๋“ฑ๋ก --- # ๋“ฑ๋ก ์ˆœ์„œ๊ฐ€ ์ค‘์š”ํ•ฉ๋‹ˆ๋‹ค: ๊ตฌ์ฒด์ ์ธ ์˜ˆ์™ธ๋ถ€ํ„ฐ ๋“ฑ๋กํ•˜๊ณ  ๊ฐ€์žฅ ์ผ๋ฐ˜์ ์ธ ์˜ˆ์™ธ(Exception)๋ฅผ ๋งˆ์ง€๋ง‰์— ๋“ฑ๋กํ•ฉ๋‹ˆ๋‹ค. 
diff --git a/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py b/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py index bbaa2cfd..d18630f6 100644 --- a/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py +++ b/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py @@ -121,4 +121,4 @@ # "NAVER_CRAWLING", # track_params=["job_id", "schedule_id", "tag", "category", "startDate", "endDate"], # response_trackers=["keyword", "total_keyword"] -# ) \ No newline at end of file +# ) diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py index edb13f8b..acb120fa 100644 --- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py +++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py @@ -9,7 +9,7 @@ import json import time -trace_id_context: ContextVar[str] = ContextVar('trace_id', default="NO_TRACE_ID") +trace_id_context: ContextVar[str] = ContextVar("trace_id", default="NO_TRACE_ID") class ServiceLoggerMiddleware(BaseHTTPMiddleware): @@ -37,14 +37,35 @@ def _default_mappings(self) -> Dict[str, Dict]: return { "/keywords/search": { "service_type": "NAVER_CRAWLING", - "track_params": ["keyword", "category", "startDate", "endDate", "job_id", "schedule_id"], - "response_trackers": ["keyword", "total_keywords", "results_count"] + "track_params": [ + "keyword", + "category", + "startDate", + "endDate", + "job_id", + "schedule_id", + ], + "response_trackers": ["keyword", "total_keywords", "results_count"], }, "/blogs/publish": { - "service_type": "BLOG_PUBLISH", - "track_params": ["tag", "title", "content", "tags", "job_id", "schedule_id", "schedule_his_id"], - "response_trackers": ["job_id", "schedule_id", "schedule_his_id", "status", "metadata"] - } + "service_type": "BLOG_PUBLISH", + "track_params": [ + "tag", + "title", + "content", + "tags", + "job_id", + "schedule_id", + "schedule_his_id", + ], + "response_trackers": [ + "job_id", + "schedule_id", + "schedule_his_id", + "status", + "metadata", + ], + }, } async def dispatch(self, request: Request, call_next): @@ -77,8 +98,12 @@ async def dispatch(self, request: Request, call_next): # 4. 
์„ฑ๊ณต ๋กœ๊น… if 200 <= response.status_code < 300: await self._log_success_response( - service_type, trace_id, start_time, param_str, - response, service_config["response_trackers"] + service_type, + trace_id, + start_time, + param_str, + response, + service_config["response_trackers"], ) else: await self._log_error_response( @@ -102,9 +127,11 @@ def _get_service_config(self, url_path: str) -> Optional[Dict]: def _match_pattern(self, url_path: str, pattern: str) -> bool: """URL ํŒจํ„ด ๋งค์นญ (๊ฐ„๋‹จํ•œ ๊ตฌํ˜„, ํ•„์š”์‹œ ์ •๊ทœ์‹์œผ๋กœ ํ™•์žฅ ๊ฐ€๋Šฅ)""" # ์ •ํ™•ํžˆ ์ผ์น˜ํ•˜๊ฑฐ๋‚˜ ํŒจํ„ด์ด ์ ‘๋‘์‚ฌ์ธ ๊ฒฝ์šฐ - return url_path == pattern or url_path.startswith(pattern.rstrip('*')) + return url_path == pattern or url_path.startswith(pattern.rstrip("*")) - async def _extract_params(self, request: Request, track_params: List[str]) -> Dict[str, Any]: + async def _extract_params( + self, request: Request, track_params: List[str] + ) -> Dict[str, Any]: """์š”์ฒญ์—์„œ ์ถ”์  ํŒŒ๋ผ๋ฏธํ„ฐ ์ถ”์ถœ""" params = {} @@ -137,9 +164,15 @@ async def _extract_params(self, request: Request, track_params: List[str]) -> Di return params - async def _log_success_response(self, service_type: str, trace_id: str, - start_time: float, param_str: str, - response: Response, response_trackers: List[str]): + async def _log_success_response( + self, + service_type: str, + trace_id: str, + start_time: float, + param_str: str, + response: Response, + response_trackers: List[str], + ): """์„ฑ๊ณต ์‘๋‹ต ๋กœ๊น…""" duration = time.time() - start_time @@ -147,16 +180,16 @@ async def _log_success_response(self, service_type: str, trace_id: str, f"[{service_type}_SUCCESS]", f"trace_id={trace_id}", f"execution_time={duration:.4f}s{param_str}", - f"status_code={response.status_code}" + f"status_code={response.status_code}", ] # ์‘๋‹ต ๋ฐ์ดํ„ฐ์—์„œ ์ถ”์  ์ •๋ณด ์ถ”์ถœ if isinstance(response, JSONResponse) and response_trackers: try: # JSONResponse body ์ฝ๊ธฐ - if hasattr(response, 'body'): + if hasattr(response, "body"): response_data = json.loads(response.body.decode()) - elif hasattr(response, 'content'): + elif hasattr(response, "content"): response_data = response.content else: response_data = None @@ -167,7 +200,9 @@ async def _log_success_response(self, service_type: str, trace_id: str, if tracker in response_data: value = response_data[tracker] if isinstance(value, dict): - response_params.append(f"{tracker}_keys={list(value.keys())}") + response_params.append( + f"{tracker}_keys={list(value.keys())}" + ) response_params.append(f"{tracker}_count={len(value)}") elif isinstance(value, list): response_params.append(f"{tracker}_count={len(value)}") @@ -182,8 +217,14 @@ async def _log_success_response(self, service_type: str, trace_id: str, logger.info(" ".join(log_parts)) - async def _log_error_response(self, service_type: str, trace_id: str, - start_time: float, param_str: str, response: Response): + async def _log_error_response( + self, + service_type: str, + trace_id: str, + start_time: float, + param_str: str, + response: Response, + ): """์—๋Ÿฌ ์‘๋‹ต ๋กœ๊น…""" duration = time.time() - start_time logger.error( @@ -192,12 +233,18 @@ async def _log_error_response(self, service_type: str, trace_id: str, f"status_code={response.status_code}" ) - async def _log_exception(self, service_type: str, trace_id: str, - start_time: float, param_str: str, exception: Exception): + async def _log_exception( + self, + service_type: str, + trace_id: str, + start_time: float, + param_str: str, + exception: Exception, + ): """์˜ˆ์™ธ 
๋กœ๊น…""" duration = time.time() - start_time logger.error( f"[{service_type}_EXCEPTION] trace_id={trace_id} " f"execution_time={duration:.4f}s{param_str} " f"exception={str(exception)}" - ) \ No newline at end of file + ) diff --git a/apps/pre-processing-service/app/middleware/logging.py b/apps/pre-processing-service/app/middleware/logging.py index 29cbe738..9a8cb6a0 100644 --- a/apps/pre-processing-service/app/middleware/logging.py +++ b/apps/pre-processing-service/app/middleware/logging.py @@ -1,4 +1,3 @@ - import time from fastapi import Request from loguru import logger @@ -12,7 +11,9 @@ async def dispatch(self, request: Request, call_next): # 1. ์š”์ฒญ ์‹œ์ž‘ ๋กœ๊ทธ logger.info( "์š”์ฒญ ์‹œ์ž‘: IP='{}' ๋ฉ”์„œ๋“œ='{}' URL='{}'", - request.client.host, request.method, request.url.path + request.client.host, + request.method, + request.url.path, ) try: @@ -23,7 +24,10 @@ async def dispatch(self, request: Request, call_next): process_time = time.time() - start_time logger.info( "์š”์ฒญ ์„ฑ๊ณต: ๋ฉ”์„œ๋“œ='{}' URL='{}' ์ƒํƒœ์ฝ”๋“œ='{}' (์ฒ˜๋ฆฌ ์‹œ๊ฐ„: {:.4f}s)", - request.method, request.url.path, response.status_code, process_time + request.method, + request.url.path, + response.status_code, + process_time, ) return response @@ -32,7 +36,11 @@ async def dispatch(self, request: Request, call_next): process_time = time.time() - start_time logger.error( "์š”์ฒญ ์‹คํŒจ: IP='{}' ๋ฉ”์„œ๋“œ='{}' URL='{}' ์˜ˆ์™ธ='{}' (์ฒ˜๋ฆฌ ์‹œ๊ฐ„: {:.4f}s)", - request.client.host, request.method, request.url.path, e, process_time + request.client.host, + request.method, + request.url.path, + e, + process_time, ) # ์˜ˆ์™ธ๋ฅผ ๋‹ค์‹œ ๋ฐœ์ƒ์‹œ์ผœ FastAPI์˜ ๊ธฐ๋ณธ ํ•ธ๋“ค๋Ÿฌ๊ฐ€ ์ฒ˜๋ฆฌํ•˜๋„๋ก ํ•จ - raise \ No newline at end of file + raise diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index f206f3e9..61720cb6 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -5,96 +5,154 @@ # ๊ธฐ๋ณธ ์š”์ฒญ class RequestBase(BaseModel): - job_id: int - schedule_id: int - schedule_his_id: Optional[int] = None + job_id: int = Field( + ..., title="์ž‘์—… ID", description="ํ˜„์žฌ ์‹คํ–‰ ์ค‘์ธ ์ž‘์—…์˜ ๊ณ ์œ  ์‹๋ณ„์ž" + ) + schedule_id: int = Field( + ..., title="์Šค์ผ€์ค„ ID", description="์˜ˆ์•ฝ๋œ ์Šค์ผ€์ค„์˜ ๊ณ ์œ  ์‹๋ณ„์ž" + ) + schedule_his_id: Optional[int] = Field( + None, title="์Šค์ผ€์ค„ ํžˆ์Šคํ† ๋ฆฌ ID", description="์Šค์ผ€์ค„ ์‹คํ–‰ ์ด๋ ฅ์˜ ๊ณ ์œ  ์‹๋ณ„์ž" + ) + # ๊ธฐ๋ณธ ์‘๋‹ต class ResponseBase(BaseModel): - job_id: int - schedule_id: int - schedule_his_id: Optional[int] = None - status: str + job_id: int = Field( + ..., title="์ž‘์—… ID", description="ํ˜„์žฌ ์‹คํ–‰ ์ค‘์ธ ์ž‘์—…์˜ ๊ณ ์œ  ์‹๋ณ„์ž" + ) + schedule_id: int = Field( + ..., title="์Šค์ผ€์ค„ ID", description="์˜ˆ์•ฝ๋œ ์Šค์ผ€์ค„์˜ ๊ณ ์œ  ์‹๋ณ„์ž" + ) + schedule_his_id: Optional[int] = Field( + None, title="์Šค์ผ€์ค„ ํžˆ์Šคํ† ๋ฆฌ ID", description="์Šค์ผ€์ค„ ์‹คํ–‰ ์ด๋ ฅ์˜ ๊ณ ์œ  ์‹๋ณ„์ž" + ) + status: str = Field(..., title="์ƒํƒœ", description="์š”์ฒญ ์ฒ˜๋ฆฌ ์ƒํƒœ") + # ๋„ค์ด๋ฒ„ ํ‚ค์›Œ๋“œ ์ถ”์ถœ class RequestNaverSearch(RequestBase): - tag: str - category: Optional[str] = None - start_date: Optional[str] = None - end_date: Optional[str] = None + tag: str = Field(..., title="ํƒœ๊ทธ", description="๋ฐ์ดํ„ฐ๋žฉ/์Šคํ† ์–ด ํƒœ๊ทธ ๊ตฌ๋ถ„") + category: Optional[str] = Field( + None, title="์นดํ…Œ๊ณ ๋ฆฌ", description="๊ฒ€์ƒ‰ํ•  ์นดํ…Œ๊ณ ๋ฆฌ" + ) + start_date: Optional[str] = Field( + None, title="์‹œ์ž‘์ผ", 
description="๊ฒ€์ƒ‰ ์‹œ์ž‘ ๋‚ ์งœ (YYYY-MM-DD)" + ) + end_date: Optional[str] = Field( + None, title="์ข…๋ฃŒ์ผ", description="๊ฒ€์ƒ‰ ์ข…๋ฃŒ ๋‚ ์งœ (YYYY-MM-DD)" + ) + class ResponseNaverSearch(ResponseBase): - category: Optional[str] = None - keyword: str - total_keyword: Dict[int, str] + category: Optional[str] = Field(None, title="์นดํ…Œ๊ณ ๋ฆฌ", description="๊ฒ€์ƒ‰ ์นดํ…Œ๊ณ ๋ฆฌ") + keyword: str = Field(..., title="ํ‚ค์›Œ๋“œ", description="๊ฒ€์ƒ‰์— ์‚ฌ์šฉ๋œ ํ‚ค์›Œ๋“œ") + total_keyword: Dict[int, str] = Field( + ..., title="์ด ํ‚ค์›Œ๋“œ", description="ํ‚ค์›Œ๋“œ๋ณ„ ์ด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ" + ) + # 2๋‹จ๊ณ„: ๊ฒ€์ƒ‰ class RequestSadaguSearch(RequestBase): - keyword: str + keyword: str = Field(..., title="๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ", description="์ƒํ’ˆ์„ ๊ฒ€์ƒ‰ํ•  ํ‚ค์›Œ๋“œ") + class ResponseSadaguSearch(ResponseBase): - keyword: str - search_results: List[Dict] + keyword: str = Field(..., title="๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ", description="๊ฒ€์ƒ‰์— ์‚ฌ์šฉ๋œ ํ‚ค์›Œ๋“œ") + search_results: List[Dict] = Field( + ..., title="๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ", description="๊ฒ€์ƒ‰๋œ ์ƒํ’ˆ ๋ชฉ๋ก" + ) + # 3๋‹จ๊ณ„: ๋งค์นญ class RequestSadaguMatch(RequestBase): - keyword: str - search_results: List[Dict] + keyword: str = Field(..., title="๋งค์นญ ํ‚ค์›Œ๋“œ", description="์ƒํ’ˆ๊ณผ ๋งค์นญํ•  ํ‚ค์›Œ๋“œ") + search_results: List[Dict] = Field( + ..., title="๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ", description="์ด์ „ ๋‹จ๊ณ„์—์„œ ๊ฒ€์ƒ‰๋œ ์ƒํ’ˆ ๋ชฉ๋ก" + ) + class ResponseSadaguMatch(ResponseBase): - keyword: str - matched_products: List[Dict] + keyword: str = Field(..., title="๋งค์นญ ํ‚ค์›Œ๋“œ", description="๋งค์นญ์— ์‚ฌ์šฉ๋œ ํ‚ค์›Œ๋“œ") + matched_products: List[Dict] = Field( + ..., title="๋งค์นญ๋œ ์ƒํ’ˆ", description="ํ‚ค์›Œ๋“œ์™€ ๋งค์นญ๋œ ์ƒํ’ˆ ๋ชฉ๋ก" + ) + # 4๋‹จ๊ณ„: ์œ ์‚ฌ๋„ class RequestSadaguSimilarity(RequestBase): - keyword: str - matched_products: List[Dict] - search_results: Optional[List[Dict]] = None # 3๋‹จ๊ณ„์—์„œ ๋งค์นญ ์‹คํŒจ์‹œ ํด๋ฐฑ์šฉ + keyword: str = Field( + ..., title="์œ ์‚ฌ๋„ ๋ถ„์„ ํ‚ค์›Œ๋“œ", description="์œ ์‚ฌ๋„ ๋ถ„์„ํ•  ํ‚ค์›Œ๋“œ" + ) + matched_products: List[Dict] = Field( + ..., title="๋งค์นญ๋œ ์ƒํ’ˆ", description="์ด์ „ ๋‹จ๊ณ„์—์„œ ๋งค์นญ๋œ ์ƒํ’ˆ ๋ชฉ๋ก" + ) + search_results: Optional[List[Dict]] = Field( + None, + title="๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ", + description="๋งค์นญ ์‹คํŒจ์‹œ ์‚ฌ์šฉํ•  ์ „์ฒด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ (ํด๋ฐฑ์šฉ)", + ) + class ResponseSadaguSimilarity(ResponseBase): - keyword: str - selected_product: Optional[Dict] = None - reason: Optional[str] = None + keyword: str = Field( + ..., title="๋ถ„์„ ํ‚ค์›Œ๋“œ", description="์œ ์‚ฌ๋„ ๋ถ„์„์— ์‚ฌ์šฉ๋œ ํ‚ค์›Œ๋“œ" + ) + selected_product: Optional[Dict] = Field( + None, title="์„ ํƒ๋œ ์ƒํ’ˆ", description="์œ ์‚ฌ๋„ ๋ถ„์„ ๊ฒฐ๊ณผ ์„ ํƒ๋œ ์ƒํ’ˆ" + ) + reason: Optional[str] = Field( + None, title="์„ ํƒ ์ด์œ ", description="์ƒํ’ˆ ์„ ํƒ ๊ทผ๊ฑฐ ๋ฐ ์ ์ˆ˜ ์ •๋ณด" + ) + # ์‚ฌ๋‹ค๊ตฌ๋ชฐ ํฌ๋กค๋ง -class RequestSadaguCrawl(BaseModel): - job_id: int = Field(..., description="์ž‘์—… ID") - schedule_id: int = Field(..., description="์Šค์ผ€์ค„ ID") - schedule_his_id: int = Field(..., description="์Šค์ผ€์ค„ ํžˆ์Šคํ† ๋ฆฌ ID") - tag: str = Field(..., description="ํฌ๋กค๋ง ํƒœ๊ทธ (์˜ˆ: 'detail')") - product_url: HttpUrl = Field(..., description="ํฌ๋กค๋งํ•  ์ƒํ’ˆ์˜ URL") - use_selenium: bool = Field(default=True, description="Selenium ์‚ฌ์šฉ ์—ฌ๋ถ€") - include_images: bool = Field(default=False, description="์ด๋ฏธ์ง€ ์ •๋ณด ํฌํ•จ ์—ฌ๋ถ€") - -class ResponseSadaguCrawl(BaseModel): - job_id: int - schedule_id: int - schedule_his_id: int - tag: str - product_url: str - 
use_selenium: bool - include_images: bool - product_detail: Optional[Dict] = None - status: str - crawled_at: Optional[str] = None - -# ๋ธ”๋กœ๊ทธ ์ƒ์„ฑ +class RequestSadaguCrawl(RequestBase): + tag: str = Field( + ..., + title="ํฌ๋กค๋ง ํƒœ๊ทธ", + description="ํฌ๋กค๋ง ์œ ํ˜•์„ ๊ตฌ๋ถ„ํ•˜๋Š” ํƒœ๊ทธ (์˜ˆ: 'detail')", + ) + product_url: HttpUrl = Field( + ..., title="์ƒํ’ˆ URL", description="ํฌ๋กค๋งํ•  ์ƒํ’ˆ ํŽ˜์ด์ง€์˜ URL" + ) + + +class ResponseSadaguCrawl(ResponseBase): + tag: str = Field(..., title="ํฌ๋กค๋ง ํƒœ๊ทธ", description="ํฌ๋กค๋ง ์œ ํ˜• ํƒœ๊ทธ") + product_url: str = Field(..., title="์ƒํ’ˆ URL", description="ํฌ๋กค๋ง๋œ ์ƒํ’ˆ URL") + product_detail: Optional[Dict] = Field( + None, title="์ƒํ’ˆ ์ƒ์„ธ์ •๋ณด", description="ํฌ๋กค๋ง๋œ ์ƒํ’ˆ์˜ ์ƒ์„ธ ์ •๋ณด" + ) + crawled_at: Optional[str] = Field( + None, title="ํฌ๋กค๋ง ์‹œ๊ฐ„", description="ํฌ๋กค๋ง ์™„๋ฃŒ ์‹œ๊ฐ„" + ) + + +# ๋ธ”๋กœ๊ทธ ์ฝ˜ํ…์ธ  ์ƒ์„ฑ class RequestBlogCreate(RequestBase): - tag: str - category: str + pass + class ResponseBlogCreate(ResponseBase): pass + # ๋ธ”๋กœ๊ทธ ๋ฐฐํฌ class RequestBlogPublish(RequestBase): - tag: str - category: str + tag: str = Field(..., title="๋ธ”๋กœ๊ทธ ํƒœ๊ทธ", description="๋ธ”๋กœ๊ทธ ํ”Œ๋žซํผ ์ข…๋ฅ˜") + blog_id: str = Field(..., description="๋ธ”๋กœ๊ทธ ์•„์ด๋””") + blog_pw: str = Field(..., description="๋ธ”๋กœ๊ทธ ๋น„๋ฐ€๋ฒˆํ˜ธ") + post_title: str = Field(..., description="ํฌ์ŠคํŒ… ์ œ๋ชฉ") + post_content: str = Field(..., description="ํฌ์ŠคํŒ… ๋‚ด์šฉ") + post_tags: List[str] = Field(default=[], description="ํฌ์ŠคํŒ… ํƒœ๊ทธ ๋ชฉ๋ก") - # ์ž„์˜๋กœ ์ถ”๊ฐ€ - title: str - content: str - tags: List[str] class ResponseBlogPublish(ResponseBase): - metadata: Optional[Dict[str, Any]] \ No newline at end of file + # ๋””๋ฒ„๊น… ์šฉ + metadata: Optional[Dict[str, Any]] = Field( + None, description="ํฌ์ŠคํŒ… ๊ด€๋ จ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ" + ) + + # ํ”„๋กœ๋•์…˜ ์šฉ + # post_url: str = Field(..., description="ํฌ์ŠคํŒ… URL") diff --git a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py index 55aa34e9..ff4b2754 100644 --- a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py @@ -1,34 +1,61 @@ from abc import ABC, abstractmethod -from typing import Dict +from typing import Dict, List, Optional from app.utils.crawling_util import CrawlingUtil from app.errors.BlogPostingException import * from app.errors.CrawlingException import * + class BaseBlogPostService(ABC): """ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ… ์„œ๋น„์Šค ์ถ”์ƒ ํด๋ž˜์Šค """ - def __init__(self): + def __init__(self, config_file="blog_config.json"): """๊ณตํ†ต ์ดˆ๊ธฐํ™” ๋กœ์ง""" - try: - self.crawling_service = CrawlingUtil() - self.web_driver = self.crawling_service.get_driver() - self.wait_driver = self.crawling_service.get_wait() - except Exception: - raise WebDriverConnectionException() + # Selenium ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค๋ฅผ ์œ„ํ•œ ์ดˆ๊ธฐํ™” + if self._requires_webdriver(): + try: + self.crawling_service = CrawlingUtil() + self.web_driver = self.crawling_service.get_driver() + self.wait_driver = self.crawling_service.get_wait() + except Exception: + raise WebDriverConnectionException() + else: + # API ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค์˜ ๊ฒฝ์šฐ WebDriver๊ฐ€ ํ•„์š” ์—†์Œ + self.crawling_service = None + self.web_driver = None + self.wait_driver = None + + # API ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค๋ฅผ ์œ„ํ•œ ์ดˆ๊ธฐํ™” + self.config_file = config_file + self.config = {} + self.current_upload_account 
= None + + # API ๊ด€๋ จ ์†์„ฑ๋“ค (์‚ฌ์šฉํ•˜์ง€ ์•Š๋Š” ์„œ๋น„์Šค์—์„œ๋Š” None์œผ๋กœ ์œ ์ง€) + self.blogger_service = None + self.blog_id = None + self.scopes = None self._load_config() + def _requires_webdriver(self) -> bool: + """ + ์„œ๋ธŒํด๋ž˜์Šค์—์„œ WebDriver๊ฐ€ ํ•„์š”ํ•œ์ง€ ์—ฌ๋ถ€๋ฅผ ๋ฐ˜ํ™˜ + ๊ธฐ๋ณธ๊ฐ’์€ True (Selenium ๊ธฐ๋ฐ˜), API ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค์—์„œ๋Š” False๋กœ ์˜ค๋ฒ„๋ผ์ด๋“œ + """ + return True + @abstractmethod def _load_config(self) -> None: """ํ”Œ๋žซํผ๋ณ„ ์„ค์ • ๋กœ๋“œ""" pass - @abstractmethod def _login(self) -> None: - """ํ”Œ๋žซํผ๋ณ„ ๋กœ๊ทธ์ธ ๊ตฌํ˜„""" + """ + ํ”Œ๋žซํผ๋ณ„ ๋กœ๊ทธ์ธ ๊ตฌํ˜„ (API ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค์˜ ๊ฒฝ์šฐ ์ธ์ฆ์œผ๋กœ ๋Œ€์ฒด) + ๊ธฐ๋ณธ ๊ตฌํ˜„์€ ์•„๋ฌด๊ฒƒ๋„ ํ•˜์ง€ ์•Š์Œ (API ์„œ๋น„์Šค์šฉ) + """ pass @abstractmethod @@ -47,21 +74,16 @@ def _get_platform_name(self) -> str: pass @abstractmethod - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """ ๊ณตํ†ต ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ๋กœ์ง :param title: ํฌ์ŠคํŠธ ์ œ๋ชฉ :param content: ํฌ์ŠคํŠธ ๋‚ด์šฉ :param tags: ํฌ์ŠคํŠธ ํƒœ๊ทธ ๋ฆฌ์ŠคํŠธ """ - # if not title or not title.strip(): - # raise BlogContentValidationException("title", "์ œ๋ชฉ์ด ๋น„์–ด์žˆ์Šต๋‹ˆ๋‹ค") - # - # if not content or not content.strip(): - # raise BlogContentValidationException("content", "๋‚ด์šฉ์ด ๋น„์–ด์žˆ์Šต๋‹ˆ๋‹ค") - # - # if tags is None: - # raise BlogContentValidationException("tags", "ํƒœ๊ทธ๊ฐ€ ๋น„์–ด์žˆ์Šต๋‹ˆ๋‹ค") + pass def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict: """ @@ -74,7 +96,7 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict # 1. ์ฝ˜ํ…์ธ  ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ self._validate_content(title, content, tags) - # 2. ๋กœ๊ทธ์ธ + # 2. ๋กœ๊ทธ์ธ (Selenium ๊ธฐ๋ฐ˜) ๋˜๋Š” ์ธ์ฆ (API ๊ธฐ๋ฐ˜) self._login() # 3. 
ํฌ์ŠคํŠธ ์ž‘์„ฑ ๋ฐ ๋ฐœํ–‰ @@ -85,10 +107,10 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict "platform": self._get_platform_name(), "title": title, "content_length": len(content), - "tags": tags or [] + "tags": tags or [], } def __del__(self): """๊ณตํ†ต ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ""" - if hasattr(self, 'web_driver') and self.web_driver: - self.web_driver.quit() \ No newline at end of file + if hasattr(self, "web_driver") and self.web_driver: + self.web_driver.quit() diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py new file mode 100644 index 00000000..07e337d9 --- /dev/null +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py @@ -0,0 +1,143 @@ +import json +import os +import pickle +from typing import Dict, List, Optional + +from googleapiclient.discovery import build +from google.auth.transport.requests import Request +from google_auth_oauthlib.flow import InstalledAppFlow + +from app.errors.BlogPostingException import * +from app.service.blog.base_blog_post_service import BaseBlogPostService + + +class BloggerBlogPostService(BaseBlogPostService): + """ + Blogger API๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํฌ์ŠคํŒ…์„ ๊ด€๋ฆฌํ•˜๋Š” ์„œ๋น„์Šค + """ + + def __init__(self, config_file="blog_config.json"): + # ๋ถ€๋ชจ ํด๋ž˜์Šค ์ƒ์„ฑ์ž ํ˜ธ์ถœ (WebDriver๋Š” None์œผ๋กœ ์„ค์ •๋จ) + super().__init__() + + # API ๊ด€๋ จ ์ถ”๊ฐ€ ์ดˆ๊ธฐํ™” + self.config_file = config_file + self.blogger_service = None + self.blog_id = None + self.scopes = ["https://www.googleapis.com/auth/blogger"] + + def _requires_webdriver(self) -> bool: + """API ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค๋Š” WebDriver๊ฐ€ ํ•„์š”ํ•˜์ง€ ์•Š์Œ""" + return False + + def _load_config(self) -> None: + """ + ํ”Œ๋žซํผ๋ณ„ ์„ค์ • ๋กœ๋“œ + """ + try: + with open(self.config_file, "r", encoding="utf-8") as f: + self.config = json.load(f) + self.current_upload_account = self.config["upload_account"] + except FileNotFoundError: + default_config = { + "upload_account": "your_account@gmail.com", + "credentials": "credentials.json", + } + with open(self.config_file, "w", encoding="utf-8") as f: + json.dump(default_config, f, indent=2) + self.config = default_config + self.current_upload_account = self.config["upload_account"] + + def _login(self) -> None: + """ + API ์ธ์ฆ (Selenium์˜ ๋กœ๊ทธ์ธ์„ ๋Œ€์ฒด) + """ + self._authenticate_api() + + def _authenticate_api(self): + """ + API ์ธ์ฆ ๋ฐ ์„œ๋น„์Šค ๊ฐ์ฒด ์ƒ์„ฑ + """ + token_file = f"token_{self.current_upload_account.replace('@', '_').replace('.', '_')}.pkl" + + try: + creds = None + if os.path.exists(token_file): + with open(token_file, "rb") as token: + creds = pickle.load(token) + + if not creds or not creds.valid: + if creds and creds.expired and creds.refresh_token: + creds.refresh(Request()) + else: + print(f"์ƒˆ API ์ธ์ฆ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค: {self.current_upload_account}") + flow = InstalledAppFlow.from_client_secrets_file( + self.config["credentials"], self.scopes + ) + creds = flow.run_local_server(port=0) + + with open(token_file, "wb") as token: + pickle.dump(creds, token) + + self.blogger_service = build("blogger", "v3", credentials=creds) + + blogs = self.blogger_service.blogs().listByUser(userId="self").execute() + if blogs.get("items"): + self.blog_id = blogs["items"][0]["id"] + print(f"API ์„ค์ • ์™„๋ฃŒ - ๋ธ”๋กœ๊ทธ: {blogs['items'][0]['name']}") + return True + else: + print("๋ธ”๋กœ๊ทธ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.") + return False + except Exception as e: + 
print(f"API ์ธ์ฆ/์„ค์ • ์‹คํŒจ: {e}") + raise BloggerApiException("API ์ธ์ฆ ์‹คํŒจ", e) + + def _write_content(self, title: str, content: str, tags: List[str] = None) -> None: + """ + API๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํฌ์ŠคํŒ… ์ž‘์„ฑ + """ + if not self.blogger_service or not self.blog_id: + self._authenticate_api() + + post_data = {"title": title, "content": content, "labels": tags or []} + + try: + result = ( + self.blogger_service.posts() + .insert(blogId=self.blog_id, body=post_data) + .execute() + ) + + print(f"ํฌ์ŠคํŠธ ์ƒ์„ฑ ์™„๋ฃŒ: {result.get('url')}") + except Exception as e: + raise BlogPostPublishException( + platform="Blogger", reason="API ํ†ต์‹  ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค." + ) from e + + def _get_platform_name(self) -> str: + """ํ”Œ๋žซํผ ์ด๋ฆ„ ๋ฐ˜ํ™˜""" + return "Blogger" + + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: + """ + ๊ณตํ†ต ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ๋กœ์ง + """ + if not title or not title.strip(): + raise BlogContentValidationException("title", "์ œ๋ชฉ์ด ๋น„์–ด์žˆ์Šต๋‹ˆ๋‹ค") + + if not content or not content.strip(): + raise BlogContentValidationException("content", "๋‚ด์šฉ์ด ๋น„์–ด์žˆ์Šต๋‹ˆ๋‹ค") + + # ํƒœ๊ทธ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ๋„ ํ•„์š”์— ๋”ฐ๋ผ ์ถ”๊ฐ€ + # if not tags or not isinstance(tags, list): + # raise BlogContentValidationException("tags", "ํƒœ๊ทธ๋Š” ๋ฆฌ์ŠคํŠธ ํ˜•ํƒœ์—ฌ์•ผ ํ•ฉ๋‹ˆ๋‹ค") + + def __del__(self): + """ + ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ - API ๊ธฐ๋ฐ˜ ์„œ๋น„์Šค๋Š” ๋ณ„๋„ ์ •๋ฆฌ ๋ถˆํ•„์š” + ๋ถ€๋ชจ ํด๋ž˜์Šค์˜ __del__์ด WebDriver ์ •๋ฆฌ๋ฅผ ์ฒ˜๋ฆฌ + """ + super().__del__() diff --git a/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py b/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py index 0aaf9431..0e33a9fd 100644 --- a/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py @@ -11,6 +11,7 @@ from app.errors.BlogPostingException import * from app.service.blog.base_blog_post_service import BaseBlogPostService + class NaverBlogPostService(BaseBlogPostService): """๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ… ์„œ๋น„์Šค ๊ตฌํ˜„""" @@ -25,7 +26,9 @@ def _load_config(self) -> None: def _get_platform_name(self) -> str: return "NAVER_BLOG" - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """๊ณตํ†ต ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ๋กœ์ง""" if not title or not title.strip(): @@ -53,7 +56,7 @@ def _login(self) -> None: pyperclip.copy(self.id) time.sleep(1) - id_input.send_keys(Keys.COMMAND, 'v') + id_input.send_keys(Keys.COMMAND, "v") time.sleep(1) # ๋น„๋ฐ€๋ฒˆํ˜ธ ์ž…๋ ฅ @@ -66,7 +69,7 @@ def _login(self) -> None: pyperclip.copy(self.password) time.sleep(1) - password_input.send_keys(Keys.COMMAND, 'v') + password_input.send_keys(Keys.COMMAND, "v") time.sleep(1) # ๋กœ๊ทธ์ธ ๋ฒ„ํŠผ ํด๋ฆญ @@ -84,7 +87,9 @@ def _login(self) -> None: except TimeoutException: raise PageLoadTimeoutException(self.login_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", "๋„คํŠธ์›Œํฌ ์—ฐ๊ฒฐ ์˜ค๋ฅ˜ ๋˜๋Š” ํŽ˜์ด์ง€ ๋กœ๋“œ ์‹คํŒจ") + raise BlogServiceUnavailableException( + "๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", "๋„คํŠธ์›Œํฌ ์—ฐ๊ฒฐ ์˜ค๋ฅ˜ ๋˜๋Š” ํŽ˜์ด์ง€ ๋กœ๋“œ ์‹คํŒจ" + ) except Exception as e: raise BlogLoginException("๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", f"์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์˜ค๋ฅ˜: {str(e)}") @@ -102,7 +107,9 @@ def _write_content(self, title: str, content: str, 
tags: List[str] = None) -> No # ๊ธฐ์กด ์ž‘์„ฑ ๊ธ€ ํŒ์—… ๋‹ซ๊ธฐ (์žˆ์„ ๊ฒฝ์šฐ) try: cancel = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '.se-popup-button.se-popup-button-cancel')) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, ".se-popup-button.se-popup-button-cancel") + ) ) cancel.click() time.sleep(1) @@ -112,10 +119,13 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # ์ œ๋ชฉ ์ž…๋ ฅ try: title_element = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '.se-placeholder.__se_placeholder.se-fs32')) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, ".se-placeholder.__se_placeholder.se-fs32") + ) ) - ActionChains(self.web_driver).move_to_element(title_element).click().pause(0.2).send_keys( - title).perform() + ActionChains(self.web_driver).move_to_element( + title_element + ).click().pause(0.2).send_keys(title).perform() time.sleep(1) except TimeoutException: raise BlogElementInteractionException("์ œ๋ชฉ ์ž…๋ ฅ ํ•„๋“œ", "์ œ๋ชฉ ์ž…๋ ฅ") @@ -123,10 +133,15 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # ๋ณธ๋ฌธ ์ž…๋ ฅ try: body_element = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '.se-component.se-text.se-l-default')) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, ".se-component.se-text.se-l-default") + ) ) - ActionChains(self.web_driver).move_to_element(body_element).click().pause(0.2) \ - .send_keys(content).pause(0.2).send_keys(Keys.ENTER).perform() + ActionChains(self.web_driver).move_to_element( + body_element + ).click().pause(0.2).send_keys(content).pause(0.2).send_keys( + Keys.ENTER + ).perform() time.sleep(1) except TimeoutException: raise BlogElementInteractionException("๋ณธ๋ฌธ ์ž…๋ ฅ ํ•„๋“œ", "๋ณธ๋ฌธ ์ž…๋ ฅ") @@ -134,7 +149,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # ๋ฐœํ–‰ ๋ฒ„ํŠผ ํด๋ฆญ try: publish_btn = self.wait_driver.until( - EC.element_to_be_clickable((By.XPATH, "//button[.//span[normalize-space()='๋ฐœํ–‰']]")) + EC.element_to_be_clickable( + (By.XPATH, "//button[.//span[normalize-space()='๋ฐœํ–‰']]") + ) ) try: publish_btn.click() @@ -148,7 +165,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No if tags: try: tag_input = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, "input[placeholder*='ํƒœ๊ทธ']")) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, "input[placeholder*='ํƒœ๊ทธ']") + ) ) for tag in tags: tag_input.send_keys(tag) @@ -161,8 +180,12 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No try: time.sleep(1) final_btn = self.wait_driver.until( - EC.element_to_be_clickable((By.XPATH, - "//div[contains(@class,'layer') or contains(@class,'popup') or @role='dialog']//*[self::button or self::a][.//span[normalize-space()='๋ฐœํ–‰']]")) + EC.element_to_be_clickable( + ( + By.XPATH, + "//div[contains(@class,'layer') or contains(@class,'popup') or @role='dialog']//*[self::button or self::a][.//span[normalize-space()='๋ฐœํ–‰']]", + ) + ) ) try: final_btn.click() @@ -178,7 +201,7 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No EC.url_contains("PostView.naver"), EC.url_contains("postList"), EC.url_contains("postList.naver"), - EC.url_contains("entry.naver") + EC.url_contains("entry.naver"), ) ) except TimeoutException: @@ -189,6 +212,10 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No except TimeoutException: raise 
PageLoadTimeoutException(self.post_content_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", "ํŽ˜์ด์ง€ ๋กœ๋“œ ์ค‘ ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜") + raise BlogServiceUnavailableException( + "๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", "ํŽ˜์ด์ง€ ๋กœ๋“œ ์ค‘ ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜" + ) except Exception as e: - raise BlogPostPublishException("๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", f"์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์˜ค๋ฅ˜: {str(e)}") + raise BlogPostPublishException( + "๋„ค์ด๋ฒ„ ๋ธ”๋กœ๊ทธ", f"์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์˜ค๋ฅ˜: {str(e)}" + ) diff --git a/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py b/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py index bcb2abaf..cc830bac 100644 --- a/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py @@ -9,6 +9,7 @@ from app.errors.BlogPostingException import * from app.service.blog.base_blog_post_service import BaseBlogPostService + class TistoryBlogPostService(BaseBlogPostService): """ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ… ์„œ๋น„์Šค""" @@ -24,7 +25,9 @@ def _load_config(self) -> None: def _get_platform_name(self) -> str: return "TISTORY_BLOG" - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """๊ณตํ†ต ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ๋กœ์ง""" if not title or not title.strip(): @@ -81,7 +84,9 @@ def _login(self) -> None: except TimeoutException: raise PageLoadTimeoutException(self.login_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "๋„คํŠธ์›Œํฌ ์—ฐ๊ฒฐ ์˜ค๋ฅ˜ ๋˜๋Š” ํŽ˜์ด์ง€ ๋กœ๋“œ ์‹คํŒจ") + raise BlogServiceUnavailableException( + "ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "๋„คํŠธ์›Œํฌ ์—ฐ๊ฒฐ ์˜ค๋ฅ˜ ๋˜๋Š” ํŽ˜์ด์ง€ ๋กœ๋“œ ์‹คํŒจ" + ) except Exception as e: raise BlogLoginException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", f"์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์˜ค๋ฅ˜: {str(e)}") @@ -107,7 +112,11 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No try: iframe = self.wait_driver.until( EC.presence_of_element_located( - (By.XPATH, "//iframe[contains(@title, 'Rich Text Area') or contains(@id, 'editor')]")) + ( + By.XPATH, + "//iframe[contains(@title, 'Rich Text Area') or contains(@id, 'editor')]", + ) + ) ) self.web_driver.switch_to.frame(iframe) @@ -125,13 +134,15 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No content_selectors = [ "//div[@contenteditable='true']", "//textarea[contains(@class, 'editor')]", - "//div[contains(@class, 'editor')]" + "//div[contains(@class, 'editor')]", ] content_area = None for selector in content_selectors: try: - content_area = self.web_driver.find_element(By.XPATH, selector) + content_area = self.web_driver.find_element( + By.XPATH, selector + ) break except: continue @@ -140,7 +151,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No content_area.clear() content_area.send_keys(content) else: - raise BlogElementInteractionException("๋ณธ๋ฌธ ์ž…๋ ฅ ํ•„๋“œ", "๋ณธ๋ฌธ ์ž…๋ ฅ") + raise BlogElementInteractionException( + "๋ณธ๋ฌธ ์ž…๋ ฅ ํ•„๋“œ", "๋ณธ๋ฌธ ์ž…๋ ฅ" + ) except Exception: raise BlogElementInteractionException("๋ณธ๋ฌธ ์ž…๋ ฅ ํ•„๋“œ", "๋ณธ๋ฌธ ์ž…๋ ฅ") @@ -150,7 +163,11 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No try: tag_input = self.wait_driver.until( EC.presence_of_element_located( - (By.XPATH, 
"//input[@placeholder='ํƒœ๊ทธ์ž…๋ ฅ' or contains(@placeholder, 'ํƒœ๊ทธ')]")) + ( + By.XPATH, + "//input[@placeholder='ํƒœ๊ทธ์ž…๋ ฅ' or contains(@placeholder, 'ํƒœ๊ทธ')]", + ) + ) ) tag_input.clear() @@ -192,27 +209,37 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No publish_selectors = [ "//button[contains(text(), '๋ฐœํ–‰')]", "//button[contains(text(), '์ €์žฅ')]", - "//*[@class='btn_publish' or contains(@class, 'publish')]" + "//*[@class='btn_publish' or contains(@class, 'publish')]", ] for selector in publish_selectors: try: - publish_btn = self.web_driver.find_element(By.XPATH, selector) + publish_btn = self.web_driver.find_element( + By.XPATH, selector + ) publish_btn.click() break except: continue else: - raise BlogPostPublishException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "๋ฐœํ–‰ ๋ฒ„ํŠผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค") + raise BlogPostPublishException( + "ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "๋ฐœํ–‰ ๋ฒ„ํŠผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค" + ) except Exception: - raise BlogPostPublishException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "๋ฐœํ–‰ ๊ณผ์ •์—์„œ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค") + raise BlogPostPublishException( + "ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "๋ฐœํ–‰ ๊ณผ์ •์—์„œ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค" + ) except (BlogElementInteractionException, BlogPostPublishException): raise except TimeoutException: raise PageLoadTimeoutException(self.post_content_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "ํŽ˜์ด์ง€ ๋กœ๋“œ ์ค‘ ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜") + raise BlogServiceUnavailableException( + "ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", "ํŽ˜์ด์ง€ ๋กœ๋“œ ์ค‘ ๋„คํŠธ์›Œํฌ ์˜ค๋ฅ˜" + ) except Exception as e: - raise BlogPostPublishException("ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", f"์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์˜ค๋ฅ˜: {str(e)}") + raise BlogPostPublishException( + "ํ‹ฐ์Šคํ† ๋ฆฌ ๋ธ”๋กœ๊ทธ", f"์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์˜ค๋ฅ˜: {str(e)}" + ) diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 11844ead..52f68578 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -1,49 +1,62 @@ -# app/service/crawl_service.py import time from app.utils.crawler_utils import DetailCrawler from app.errors.CustomException import InvalidItemDataException from app.model.schemas import RequestSadaguCrawl - - -async def crawl_product_detail(request: RequestSadaguCrawl) -> dict: - """ - ์„ ํƒ๋œ ์ƒํ’ˆ์˜ ์ƒ์„ธ ์ •๋ณด๋ฅผ ํฌ๋กค๋งํ•˜๋Š” ๋น„์ฆˆ๋‹ˆ์Šค ๋กœ์ง์ž…๋‹ˆ๋‹ค. (5๋‹จ๊ณ„) - ์ƒํ’ˆ URL์„ ์ž…๋ ฅ๋ฐ›์•„ ์ƒ์„ธ ์ •๋ณด๋ฅผ ํฌ๋กค๋งํ•˜์—ฌ ๋”•์…”๋„ˆ๋ฆฌ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. 
- """ - crawler = DetailCrawler(use_selenium=request.use_selenium) - - try: - print(f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์‹œ์ž‘: {request.product_url}") - - # ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง ์‹คํ–‰ - product_detail = await crawler.crawl_detail( - product_url=str(request.product_url), - include_images=request.include_images - ) - - if not product_detail: - raise InvalidItemDataException("์ƒํ’ˆ ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง ์‹คํŒจ") - - print(f"ํฌ๋กค๋ง ์™„๋ฃŒ: {product_detail.get('title', 'Unknown')[:50]}") - - # ์‘๋‹ต ๋ฐ์ดํ„ฐ ๊ตฌ์„ฑ - response_data = { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "tag": request.tag, - "product_url": str(request.product_url), - "use_selenium": request.use_selenium, - "include_images": request.include_images, - "product_detail": product_detail, - "status": "success", - "crawled_at": time.strftime('%Y-%m-%d %H:%M:%S') - } - - return response_data - - except Exception as e: - print(f"ํฌ๋กค๋ง ์„œ๋น„์Šค ์˜ค๋ฅ˜: {e}") - raise InvalidItemDataException(f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์˜ค๋ฅ˜: {e}") - finally: - await crawler.close() \ No newline at end of file +from loguru import logger + + +class CrawlService: + def __init__(self): + pass + + async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: + """ + ์„ ํƒ๋œ ์ƒํ’ˆ์˜ ์ƒ์„ธ ์ •๋ณด๋ฅผ ํฌ๋กค๋งํ•˜๋Š” ๋น„์ฆˆ๋‹ˆ์Šค ๋กœ์ง์ž…๋‹ˆ๋‹ค. (5๋‹จ๊ณ„) + ์ƒํ’ˆ URL์„ ์ž…๋ ฅ๋ฐ›์•„ ์ƒ์„ธ ์ •๋ณด๋ฅผ ํฌ๋กค๋งํ•˜์—ฌ ๋”•์…”๋„ˆ๋ฆฌ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. + """ + crawler = DetailCrawler(use_selenium=True) + + try: + logger.info( + f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์„œ๋น„์Šค ์‹œ์ž‘: job_id={request.job_id}, schedule_id={request.schedule_id}, product_url={request.product_url}" + ) + + # ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง ์‹คํ–‰ + product_detail = await crawler.crawl_detail( + product_url=str(request.product_url), include_images=False + ) + + if not product_detail: + logger.error(f"์ƒํ’ˆ ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง ์‹คํŒจ: url={request.product_url}") + raise InvalidItemDataException("์ƒํ’ˆ ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง ์‹คํŒจ") + + product_title = product_detail.get("title", "Unknown")[:50] + logger.success( + f"ํฌ๋กค๋ง ์™„๋ฃŒ: title='{product_title}', price={product_detail.get('price', 0)}, options_count={len(product_detail.get('options', []))}" + ) + + # ์‘๋‹ต ๋ฐ์ดํ„ฐ ๊ตฌ์„ฑ + response_data = { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "tag": request.tag, + "product_url": str(request.product_url), + "product_detail": product_detail, + "status": "success", + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), + } + + logger.info( + f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์„œ๋น„์Šค ์™„๋ฃŒ: job_id={request.job_id}, status=success" + ) + return response_data + + except Exception as e: + logger.error( + f"ํฌ๋กค๋ง ์„œ๋น„์Šค ์˜ค๋ฅ˜: job_id={request.job_id}, product_url={request.product_url}, error='{e}'" + ) + raise InvalidItemDataException(f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์˜ค๋ฅ˜: {e}") + finally: + await crawler.close() + logger.debug("ํฌ๋กค๋Ÿฌ ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ ์™„๋ฃŒ") diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py index da39aac9..575767ee 100644 --- a/apps/pre-processing-service/app/service/keyword_service.py +++ b/apps/pre-processing-service/app/service/keyword_service.py @@ -8,18 +8,21 @@ from ..errors.CustomException import InvalidItemDataException from ..model.schemas import RequestNaverSearch + async def keyword_search(request: RequestNaverSearch) -> 
dict: """ ๋„ค์ด๋ฒ„ ๊ฒ€์ƒ‰ ์š”์ฒญ์„ ์ฒ˜๋ฆฌํ•˜๋Š” ๋น„์ฆˆ๋‹ˆ์Šค ๋กœ์ง์ž…๋‹ˆ๋‹ค. ์ž…๋ ฅ๋ฐ›์€ ๋ฐ์ดํ„ฐ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์‘๋‹ต ๋ฐ์ดํ„ฐ๋ฅผ ์ƒ์„ฑํ•˜์—ฌ ๋”•์…”๋„ˆ๋ฆฌ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. """ - #ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰ + # ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰ if request.tag == "naver": - trending_keywords = await search_naver_rank(**request.model_dump(include={'category', 'start_date', 'end_date'})) + trending_keywords = await search_naver_rank( + **request.model_dump(include={"category", "start_date", "end_date"}) + ) elif request.tag == "naver_store": trending_keywords = await search_naver_store() - else : + else: raise InvalidItemDataException() if not trending_keywords: @@ -31,7 +34,8 @@ async def keyword_search(request: RequestNaverSearch) -> dict: response_data["status"] = "success" return response_data -async def search_naver_rank(category,start_date,end_date) -> dict[int,str]: + +async def search_naver_rank(category, start_date, end_date) -> dict[int, str]: """ ๋„ค์ด๋ฒ„ ๋ฐ์ดํ„ฐ ๋žฉ ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰ ๋ชจ๋“ˆ """ @@ -39,9 +43,9 @@ async def search_naver_rank(category,start_date,end_date) -> dict[int,str]: headers = { "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "Referer": "https://datalab.naver.com/shoppingInsight/sCategory.naver", - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36", } - keywords_dic ={} + keywords_dic = {} async with httpx.AsyncClient() as client: for page in range(1, 3): payload = { @@ -58,15 +62,19 @@ async def search_naver_rank(category,start_date,end_date) -> dict[int,str]: response = await client.post(url, headers=headers, data=payload) response.raise_for_status() data = response.json() - for item in data.get('ranks', []): - keywords_dic[item.get('rank')] = item.get('keyword') - except (httpx.HTTPStatusError, httpx.RequestError, json.JSONDecodeError) as e: + for item in data.get("ranks", []): + keywords_dic[item.get("rank")] = item.get("keyword") + except ( + httpx.HTTPStatusError, + httpx.RequestError, + json.JSONDecodeError, + ) as e: print(f"๋„ค์ด๋ฒ„ ๋ฐ์ดํ„ฐ๋žฉ์—์„œ ๋ฐ์ดํ„ฐ๋ฅผ ๊ฐ€์ ธ์˜ค๋Š” ๋ฐ ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค: {e}") raise InvalidItemDataException return keywords_dic -async def search_naver_store() -> dict[int,str]: +async def search_naver_store() -> dict[int, str]: """ ๋„ค์ด๋ฒ„ ์Šคํ† ์–ด์˜ ์ผ์ผ ์ธ๊ธฐ ๊ฒ€์ƒ‰์–ด ์ˆœ์œ„ ๋ฐ์ดํ„ฐ๋ฅผ ๊ฐ€์ ธ์˜ต๋‹ˆ๋‹ค. API ์‘๋‹ต์˜ 'keyword' ํ•„๋“œ๋ฅผ 'title'๋กœ ๋ณ€๊ฒฝํ•˜์—ฌ ์ „์ฒด ์ˆœ์œ„ ๋ชฉ๋ก์„ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. 
@@ -83,10 +91,10 @@ async def search_naver_store() -> dict[int,str]: keyword_dict = {} for item in data: - keyword_dict[item['rank']] = item['title'] + keyword_dict[item["rank"]] = item["title"] return keyword_dict except (httpx.HTTPStatusError, httpx.RequestError, json.JSONDecodeError) as e: print(f"๋„ค์ด๋ฒ„ ์Šคํ† ์–ด์—์„œ ๋ฐ์ดํ„ฐ๋ฅผ ๊ฐ€์ ธ์˜ค๋Š” ๋ฐ ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค: {e}") - raise InvalidItemDataException from e \ No newline at end of file + raise InvalidItemDataException from e diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py index 6b1cc171..613f301a 100644 --- a/apps/pre-processing-service/app/service/match_service.py +++ b/apps/pre-processing-service/app/service/match_service.py @@ -1,66 +1,95 @@ from app.utils.keyword_matcher import KeywordMatcher from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguMatch +from loguru import logger -def match_products(request: RequestSadaguMatch) -> dict: - """ - ํ‚ค์›Œ๋“œ ๋งค์นญ ๋กœ์ง (MeCab ๋“ฑ ์‚ฌ์šฉ) - 3๋‹จ๊ณ„ - """ - keyword = request.keyword - products = request.search_results - - if not products: - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "matched_products": [], - "status": "success" - } - - try: - matcher = KeywordMatcher() - matched_products = [] - - print(f"ํ‚ค์›Œ๋“œ '{keyword}'์™€ {len(products)}๊ฐœ ์ƒํ’ˆ ๋งค์นญ ๋ถ„์„ ์‹œ์ž‘...") - - for i, product in enumerate(products): - title = product.get('title', '') - if not title: - continue - - # ํ‚ค์›Œ๋“œ ๋งค์นญ ๋ถ„์„ - match_result = matcher.analyze_keyword_match(title, keyword) - - print(f"์ƒํ’ˆ {i + 1}: {title[:50]} | {match_result['reason']}") - - if match_result['is_match']: - # ๋งค์นญ๋œ ์ƒํ’ˆ์— ๋งค์นญ ์ •๋ณด ์ถ”๊ฐ€ - matched_product = product.copy() - matched_product['match_info'] = { - 'match_type': match_result['match_type'], - 'match_score': match_result['score'], - 'match_reason': match_result['reason'] - } - matched_products.append(matched_product) - print(f" โœ… ๋งค์นญ๋จ!") - - print(f"๋งค์นญ ๊ฒฐ๊ณผ: {len(matched_products)}๊ฐœ ์ƒํ’ˆ") - - # ๋งค์นญ ์Šค์ฝ”์–ด ๊ธฐ์ค€์œผ๋กœ ์ •๋ ฌ (๋†’์€ ์ˆœ) - matched_products.sort(key=lambda x: x['match_info']['match_score'], reverse=True) - - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "matched_products": matched_products, - "status": "success" - } - - except Exception as e: - print(f"๋งค์นญ ์„œ๋น„์Šค ์˜ค๋ฅ˜: {e}") - raise InvalidItemDataException(f"ํ‚ค์›Œ๋“œ ๋งค์นญ ์‹คํŒจ: {str(e)}") \ No newline at end of file +class MatchService: + def __init__(self): + pass + + def match_products(self, request: RequestSadaguMatch) -> dict: + """ + ํ‚ค์›Œ๋“œ ๋งค์นญ ๋กœ์ง (MeCab ๋“ฑ ์‚ฌ์šฉ) - 3๋‹จ๊ณ„ + """ + keyword = request.keyword + products = request.search_results + + logger.info( + f"ํ‚ค์›Œ๋“œ ๋งค์นญ ์„œ๋น„์Šค ์‹œ์ž‘: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}" + ) + + if not products: + logger.warning(f"๋งค์นญํ•  ์ƒํ’ˆ์ด ์—†์Œ: keyword='{keyword}'") + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "matched_products": [], + "status": "success", + } + + try: + matcher = KeywordMatcher() + matched_products = [] + + logger.info( + 
f"ํ‚ค์›Œ๋“œ '{keyword}'์™€ {len(products)}๊ฐœ ์ƒํ’ˆ ๋งค์นญ ๋ถ„์„ ์‹œ์ž‘..." + ) + + for i, product in enumerate(products): + title = product.get("title", "") + if not title: + logger.debug(f"์ƒํ’ˆ {i + 1}: ์ œ๋ชฉ์ด ์—†์–ด์„œ ์Šคํ‚ต") + continue + + logger.debug(f"์ƒํ’ˆ {i + 1} ๋งค์นญ ๋ถ„์„ ์‹œ์ž‘: title='{title[:50]}'") + + # ํ‚ค์›Œ๋“œ ๋งค์นญ ๋ถ„์„ + match_result = matcher.analyze_keyword_match(title, keyword) + + logger.debug(f"์ƒํ’ˆ {i + 1} ๋งค์นญ ๊ฒฐ๊ณผ: {match_result['reason']}") + + if match_result["is_match"]: + # ๋งค์นญ๋œ ์ƒํ’ˆ์— ๋งค์นญ ์ •๋ณด ์ถ”๊ฐ€ + matched_product = product.copy() + matched_product["match_info"] = { + "match_type": match_result["match_type"], + "match_score": match_result["score"], + "match_reason": match_result["reason"], + } + matched_products.append(matched_product) + logger.info( + f"์ƒํ’ˆ {i + 1} ๋งค์นญ ์„ฑ๊ณต: title='{title[:30]}', type={match_result['match_type']}, score={match_result['score']:.3f}" + ) + + # ๋งค์นญ ์Šค์ฝ”์–ด ๊ธฐ์ค€์œผ๋กœ ์ •๋ ฌ (๋†’์€ ์ˆœ) + matched_products.sort( + key=lambda x: x["match_info"]["match_score"], reverse=True + ) + + logger.success( + f"ํ‚ค์›Œ๋“œ ๋งค์นญ ์™„๋ฃŒ: keyword='{keyword}', total_products={len(products)}, matched_products={len(matched_products)}" + ) + + if matched_products: + best_match = matched_products[0] + logger.info( + f"์ตœ๊ณ  ๋งค์นญ ์ƒํ’ˆ: title='{best_match['title'][:30]}', score={best_match['match_info']['match_score']:.3f}" + ) + + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "matched_products": matched_products, + "status": "success", + } + + except Exception as e: + logger.error( + f"๋งค์นญ ์„œ๋น„์Šค ์˜ค๋ฅ˜: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + ) + raise InvalidItemDataException(f"ํ‚ค์›Œ๋“œ ๋งค์นญ ์‹คํŒจ: {str(e)}") diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index da7aa1fd..a130db46 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -1,81 +1,109 @@ from app.utils.crawler_utils import SearchCrawler from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSearch +from loguru import logger -async def search_products(request: RequestSadaguSearch) -> dict: - """ - ํ‚ค์›Œ๋“œ ๊ธฐ๋ฐ˜์œผ๋กœ ์ƒํ’ˆ์„ ๊ฒ€์ƒ‰ํ•˜๋Š” ๋น„์ฆˆ๋‹ˆ์Šค ๋กœ์ง (2๋‹จ๊ณ„) - """ - keyword = request.keyword - crawler = SearchCrawler(use_selenium=True) +class SearchService: + def __init__(self): + pass - try: - print(f"ํ‚ค์›Œ๋“œ '{keyword}'๋กœ ์ƒํ’ˆ ๊ฒ€์ƒ‰ ์‹œ์ž‘...") + async def search_products(self, request: RequestSadaguSearch) -> dict: + """ + ํ‚ค์›Œ๋“œ ๊ธฐ๋ฐ˜์œผ๋กœ ์ƒํ’ˆ์„ ๊ฒ€์ƒ‰ํ•˜๋Š” ๋น„์ฆˆ๋‹ˆ์Šค ๋กœ์ง (2๋‹จ๊ณ„) + """ + keyword = request.keyword + crawler = SearchCrawler(use_selenium=True) - # Selenium ๋˜๋Š” httpx๋กœ ์ƒํ’ˆ ๊ฒ€์ƒ‰ - if crawler.use_selenium: - search_results = await crawler.search_products_selenium(keyword) - else: - search_results = await crawler.search_products_httpx(keyword) + try: + logger.info( + f"์ƒํ’ˆ ๊ฒ€์ƒ‰ ์„œ๋น„์Šค ์‹œ์ž‘: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'" + ) - if not search_results: - print("๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.") - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "search_results": [], - "status": 
"success" - } + # Selenium ๋˜๋Š” httpx๋กœ ์ƒํ’ˆ ๊ฒ€์ƒ‰ + if crawler.use_selenium: + search_results = await crawler.search_products_selenium(keyword) + else: + search_results = await crawler.search_products_httpx(keyword) - # ์ƒํ’ˆ๋ณ„ ๊ธฐ๋ณธ ์ •๋ณด ์ˆ˜์ง‘ (์ œ๋ชฉ์ด ์—†๋Š” ๊ฒฝ์šฐ ๋‹ค์‹œ ํฌ๋กค๋ง) - enriched_results = [] - print(f"์ด {len(search_results)}๊ฐœ ์ƒํ’ˆ์˜ ๊ธฐ๋ณธ ์ •๋ณด๋ฅผ ์ˆ˜์ง‘ ์ค‘...") + if not search_results: + logger.warning(f"๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค: keyword='{keyword}'") + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "search_results": [], + "status": "success", + } - for i, product in enumerate(search_results): - try: - # ์ด๋ฏธ ์ œ๋ชฉ์ด ์žˆ๊ณ  ์œ ํšจํ•œ ๊ฒฝ์šฐ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉ - if product.get('title') and product['title'] != 'Unknown Title' and len(product['title'].strip()) > 0: - enriched_results.append(product) - else: - # ์ œ๋ชฉ์ด ์—†๊ฑฐ๋‚˜ ์œ ํšจํ•˜์ง€ ์•Š์€ ๊ฒฝ์šฐ ๋‹ค์‹œ ํฌ๋กค๋ง - print(f"์ƒํ’ˆ {i + 1}: ์ œ๋ชฉ ์žฌ์ˆ˜์ง‘ ์ค‘... ({product['url']})") - basic_info = await crawler.get_basic_product_info(product['url']) + # ์ƒํ’ˆ๋ณ„ ๊ธฐ๋ณธ ์ •๋ณด ์ˆ˜์ง‘ (์ œ๋ชฉ์ด ์—†๋Š” ๊ฒฝ์šฐ ๋‹ค์‹œ ํฌ๋กค๋ง) + enriched_results = [] + logger.info(f"์ด {len(search_results)}๊ฐœ ์ƒํ’ˆ์˜ ๊ธฐ๋ณธ ์ •๋ณด๋ฅผ ์ˆ˜์ง‘ ์ค‘...") - if basic_info and basic_info['title'] != "์ œ๋ชฉ ์—†์Œ": - enriched_results.append({ - 'url': product['url'], - 'title': basic_info['title'] - }) + for i, product in enumerate(search_results): + try: + # ์ด๋ฏธ ์ œ๋ชฉ์ด ์žˆ๊ณ  ์œ ํšจํ•œ ๊ฒฝ์šฐ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉ + if ( + product.get("title") + and product["title"] != "Unknown Title" + and len(product["title"].strip()) > 0 + ): + enriched_results.append(product) + logger.debug( + f"์ƒํ’ˆ {i + 1}: ๊ธฐ์กด ์ œ๋ชฉ ์‚ฌ์šฉ - '{product['title'][:30]}'" + ) else: - # ๊ทธ๋ž˜๋„ ์ œ๋ชฉ์„ ๋ชป ์ฐพ์œผ๋ฉด ์ œ์™ธ - print(f" ์ œ๋ชฉ ์ถ”์ถœ ์‹คํŒจ, ์ œ์™ธ") - continue + # ์ œ๋ชฉ์ด ์—†๊ฑฐ๋‚˜ ์œ ํšจํ•˜์ง€ ์•Š์€ ๊ฒฝ์šฐ ๋‹ค์‹œ ํฌ๋กค๋ง + logger.debug( + f"์ƒํ’ˆ {i + 1}: ์ œ๋ชฉ ์žฌ์ˆ˜์ง‘ ์ค‘... 
({product['url']})" + ) + basic_info = await crawler.get_basic_product_info( + product["url"] + ) + + if basic_info and basic_info["title"] != "์ œ๋ชฉ ์—†์Œ": + enriched_results.append( + {"url": product["url"], "title": basic_info["title"]} + ) + logger.debug( + f"์ƒํ’ˆ {i + 1}: ์ œ๋ชฉ ์žฌ์ˆ˜์ง‘ ์„ฑ๊ณต - '{basic_info['title'][:30]}'" + ) + else: + # ๊ทธ๋ž˜๋„ ์ œ๋ชฉ์„ ๋ชป ์ฐพ์œผ๋ฉด ์ œ์™ธ + logger.debug(f"์ƒํ’ˆ {i + 1}: ์ œ๋ชฉ ์ถ”์ถœ ์‹คํŒจ, ์ œ์™ธ") + continue - # ์ตœ๋Œ€ 20๊ฐœ๊นŒ์ง€๋งŒ ์ฒ˜๋ฆฌ - if len(enriched_results) >= 20: - break + # ์ตœ๋Œ€ 20๊ฐœ๊นŒ์ง€๋งŒ ์ฒ˜๋ฆฌ + if len(enriched_results) >= 20: + logger.info("์ตœ๋Œ€ 20๊ฐœ ์ƒํ’ˆ ์ˆ˜์ง‘ ์™„๋ฃŒ") + break - except Exception as e: - print(f"์ƒํ’ˆ {i + 1} ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {e}") - continue + except Exception as e: + logger.error( + f"์ƒํ’ˆ {i + 1} ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: url={product.get('url', 'N/A')}, error='{e}'" + ) + continue - print(f"์ตœ์ข… ์ˆ˜์ง‘๋œ ์œ ํšจ ์ƒํ’ˆ: {len(enriched_results)}๊ฐœ") + logger.success( + f"์ƒํ’ˆ ๊ฒ€์ƒ‰ ์™„๋ฃŒ: keyword='{keyword}', ์ดˆ๊ธฐ๊ฒ€์ƒ‰={len(search_results)}๊ฐœ, ์ตœ์ข…์œ ํšจ์ƒํ’ˆ={len(enriched_results)}๊ฐœ" + ) - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "search_results": enriched_results, - "status": "success" - } + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "search_results": enriched_results, + "status": "success", + } - except Exception as e: - print(f"๊ฒ€์ƒ‰ ์„œ๋น„์Šค ์˜ค๋ฅ˜: {e}") - raise InvalidItemDataException(f"์ƒํ’ˆ ๊ฒ€์ƒ‰ ์‹คํŒจ: {str(e)}") + except Exception as e: + logger.error( + f"๊ฒ€์ƒ‰ ์„œ๋น„์Šค ์˜ค๋ฅ˜: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + ) + raise InvalidItemDataException(f"์ƒํ’ˆ ๊ฒ€์ƒ‰ ์‹คํŒจ: {str(e)}") - finally: - await crawler.close() \ No newline at end of file + finally: + await crawler.close() + logger.debug("๊ฒ€์ƒ‰ ํฌ๋กค๋Ÿฌ ๋ฆฌ์†Œ์Šค ์ •๋ฆฌ ์™„๋ฃŒ") diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 27823e9e..bd573eec 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -1,137 +1,177 @@ from app.utils.similarity_analyzer import SimilarityAnalyzer from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSimilarity - - -def select_product_by_similarity(request: RequestSadaguSimilarity) -> dict: - """ - BERT ๊ธฐ๋ฐ˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ํ›„ ์ƒํ’ˆ ์„ ํƒ - 4๋‹จ๊ณ„ - """ - keyword = request.keyword - candidates = request.matched_products - fallback_products = request.search_results or [] - - # ๋งค์นญ๋œ ์ƒํ’ˆ์ด ์—†์œผ๋ฉด ์ „์ฒด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋กœ ํด๋ฐฑ - if not candidates: - if not fallback_products: - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": None, - "reason": "๋งค์นญ๋œ ์ƒํ’ˆ๊ณผ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ๋ชจ๋‘ ์—†์Œ", - "status": "success" - } - - print("๋งค์นญ๋œ ์ƒํ’ˆ ์—†์Œ โ†’ ์ „์ฒด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์—์„œ ์œ ์‚ฌ๋„ ๋ถ„์„") - candidates = fallback_products - analysis_mode = "fallback_similarity_only" - else: - analysis_mode = "matched_products" - - try: - analyzer = SimilarityAnalyzer() - - print(f"ํ‚ค์›Œ๋“œ '{keyword}'์™€ {len(candidates)}๊ฐœ ์ƒํ’ˆ์˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ์‹œ์ž‘... 
(๋ชจ๋“œ: {analysis_mode})") - - # ํ•œ ๊ฐœ๋งŒ ์žˆ์œผ๋ฉด ๋ฐ”๋กœ ์„ ํƒ - if len(candidates) == 1: - selected_product = candidates[0] - - # ์œ ์‚ฌ๋„ ๊ณ„์‚ฐ - similarity = analyzer.calculate_similarity(keyword, selected_product['title']) +from loguru import logger + + +class SimilarityService: + def __init__(self): + pass + + def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict: + """ + BERT ๊ธฐ๋ฐ˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ํ›„ ์ƒํ’ˆ ์„ ํƒ - 4๋‹จ๊ณ„ + """ + keyword = request.keyword + candidates = request.matched_products + fallback_products = request.search_results or [] + + logger.info( + f"์œ ์‚ฌ๋„ ๋ถ„์„ ์„œ๋น„์Šค ์‹œ์ž‘: job_id={request.job_id}, keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}" + ) + + # ๋งค์นญ๋œ ์ƒํ’ˆ์ด ์—†์œผ๋ฉด ์ „์ฒด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋กœ ํด๋ฐฑ + if not candidates: + if not fallback_products: + logger.warning( + f"๋งค์นญ๋œ ์ƒํ’ˆ๊ณผ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ๋ชจ๋‘ ์—†์Œ: keyword='{keyword}'" + ) + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": None, + "reason": "๋งค์นญ๋œ ์ƒํ’ˆ๊ณผ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ๋ชจ๋‘ ์—†์Œ", + "status": "success", + } + + logger.info("๋งค์นญ๋œ ์ƒํ’ˆ ์—†์Œ โ†’ ์ „์ฒด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์—์„œ ์œ ์‚ฌ๋„ ๋ถ„์„") + candidates = fallback_products + analysis_mode = "fallback_similarity_only" + else: + analysis_mode = "matched_products" + + try: + analyzer = SimilarityAnalyzer() + + logger.info( + f"ํ‚ค์›Œ๋“œ '{keyword}'์™€ {len(candidates)}๊ฐœ ์ƒํ’ˆ์˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ์‹œ์ž‘... (๋ชจ๋“œ: {analysis_mode})" + ) + + # ํ•œ ๊ฐœ๋งŒ ์žˆ์œผ๋ฉด ๋ฐ”๋กœ ์„ ํƒ + if len(candidates) == 1: + selected_product = candidates[0] + + logger.info("๋‹จ์ผ ํ›„๋ณด ์ƒํ’ˆ - ์œ ์‚ฌ๋„ ๊ฒ€์ฆ ์ง„ํ–‰") + # ์œ ์‚ฌ๋„ ๊ณ„์‚ฐ + similarity = analyzer.calculate_similarity( + keyword, selected_product["title"] + ) + + # ํด๋ฐฑ ๋ชจ๋“œ์—์„œ๋Š” ์ž„๊ณ„๊ฐ’ ๊ฒ€์ฆ + if analysis_mode == "fallback_similarity_only": + similarity_threshold = 0.3 + if similarity < similarity_threshold: + logger.warning( + f"๋‹จ์ผ ์ƒํ’ˆ ์œ ์‚ฌ๋„ ๋ฏธ๋‹ฌ: similarity={similarity:.4f} < threshold={similarity_threshold}" + ) + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": None, + "reason": f"๋‹จ์ผ ์ƒํ’ˆ ์œ ์‚ฌ๋„({similarity:.4f}) < ๊ธฐ์ค€({similarity_threshold})", + "status": "success", + } + + selected_product["similarity_info"] = { + "similarity_score": float(similarity), + "analysis_type": "single_candidate", + "analysis_mode": analysis_mode, + } + + logger.success( + f"๋‹จ์ผ ์ƒํ’ˆ ์„ ํƒ ์™„๋ฃŒ: title='{selected_product['title'][:30]}', similarity={similarity:.4f}" + ) + + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": selected_product, + "reason": f"๋‹จ์ผ ์ƒํ’ˆ - ์œ ์‚ฌ๋„: {similarity:.4f} ({analysis_mode})", + "status": "success", + } + + # ์—ฌ๋Ÿฌ ๊ฐœ๊ฐ€ ์žˆ์œผ๋ฉด ์œ ์‚ฌ๋„ ๋น„๊ต + logger.info("์—ฌ๋Ÿฌ ์ƒํ’ˆ ์ค‘ ์ตœ๊ณ  ์œ ์‚ฌ๋„๋กœ ์„ ํƒ...") + + # ์ œ๋ชฉ๋งŒ ์ถ”์ถœํ•ด์„œ ๋ฐฐ์น˜ ๋ถ„์„ + titles = [product["title"] for product in candidates] + similarity_results = analyzer.analyze_similarity_batch(keyword, titles) + + # ๊ฒฐ๊ณผ ์ถœ๋ ฅ + logger.info("์œ ์‚ฌ๋„ ๋ถ„์„ ๊ฒฐ๊ณผ:") + for i, result in enumerate(similarity_results[:5]): # ์ƒ์œ„ 5๊ฐœ๋งŒ ๋กœ๊ทธ + logger.info( + f" 
{i+1}์œ„: {result['title'][:40]} | ์œ ์‚ฌ๋„: {result['similarity']:.4f}" + ) + + # ์ตœ๊ณ  ์œ ์‚ฌ๋„ ์„ ํƒ + best_result = similarity_results[0] + selected_product = candidates[best_result["index"]].copy() # ํด๋ฐฑ ๋ชจ๋“œ์—์„œ๋Š” ์ž„๊ณ„๊ฐ’ ๊ฒ€์ฆ - if analysis_mode == "fallback_similarity_only": - similarity_threshold = 0.3 - if similarity < similarity_threshold: - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": None, - "reason": f"๋‹จ์ผ ์ƒํ’ˆ ์œ ์‚ฌ๋„({similarity:.4f}) < ๊ธฐ์ค€({similarity_threshold})", - "status": "success" - } - - selected_product['similarity_info'] = { - 'similarity_score': float(similarity), - 'analysis_type': 'single_candidate', - 'analysis_mode': analysis_mode - } - - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": selected_product, - "reason": f"๋‹จ์ผ ์ƒํ’ˆ - ์œ ์‚ฌ๋„: {similarity:.4f} ({analysis_mode})", - "status": "success" + similarity_threshold = 0.3 + if ( + analysis_mode == "fallback_similarity_only" + and best_result["similarity"] < similarity_threshold + ): + logger.warning( + f"์ตœ๊ณ  ์œ ์‚ฌ๋„ ๋ฏธ๋‹ฌ: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}" + ) + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": None, + "reason": f"์ตœ๊ณ  ์œ ์‚ฌ๋„({best_result['similarity']:.4f}) < ๊ธฐ์ค€({similarity_threshold})", + "status": "success", + } + + # ์œ ์‚ฌ๋„ ์ •๋ณด ์ถ”๊ฐ€ + selected_product["similarity_info"] = { + "similarity_score": best_result["similarity"], + "analysis_type": "multi_candidate_bert", + "analysis_mode": analysis_mode, + "rank": 1, + "total_candidates": len(candidates), } - # ์—ฌ๋Ÿฌ ๊ฐœ๊ฐ€ ์žˆ์œผ๋ฉด ์œ ์‚ฌ๋„ ๋น„๊ต - print("์—ฌ๋Ÿฌ ์ƒํ’ˆ ์ค‘ ์ตœ๊ณ  ์œ ์‚ฌ๋„๋กœ ์„ ํƒ...") - - # ์ œ๋ชฉ๋งŒ ์ถ”์ถœํ•ด์„œ ๋ฐฐ์น˜ ๋ถ„์„ - titles = [product['title'] for product in candidates] - similarity_results = analyzer.analyze_similarity_batch(keyword, titles) - - # ๊ฒฐ๊ณผ ์ถœ๋ ฅ - for result in similarity_results: - print(f" {result['title'][:40]} | ์œ ์‚ฌ๋„: {result['similarity']:.4f}") + # ๋งค์นญ ๋ชจ๋“œ์—์„œ๋Š” ์ข…ํ•ฉ ์ ์ˆ˜๋„ ๊ณ„์‚ฐ + if analysis_mode == "matched_products" and "match_info" in selected_product: + match_score = selected_product["match_info"]["match_score"] + similarity_score = best_result["similarity"] + # ๊ฐ€์ค‘์น˜: ๋งค์นญ 40%, ์œ ์‚ฌ๋„ 60% + final_score = match_score * 0.4 + similarity_score * 0.6 + selected_product["final_score"] = final_score + reason = f"์ข…ํ•ฉ์ ์ˆ˜({final_score:.4f}) = ๋งค์นญ({match_score:.4f})*0.4 + ์œ ์‚ฌ๋„({similarity_score:.4f})*0.6" + logger.info( + f"์ข…ํ•ฉ ์ ์ˆ˜ ๊ณ„์‚ฐ: match_score={match_score:.4f}, similarity_score={similarity_score:.4f}, final_score={final_score:.4f}" + ) + else: + reason = f"์œ ์‚ฌ๋„({best_result['similarity']:.4f}) ๊ธฐ์ค€ ์„ ํƒ ({analysis_mode})" + + logger.success( + f"์ƒํ’ˆ ์„ ํƒ ์™„๋ฃŒ: title='{selected_product['title'][:30]}', {reason}" + ) - # ์ตœ๊ณ  ์œ ์‚ฌ๋„ ์„ ํƒ - best_result = similarity_results[0] - selected_product = candidates[best_result['index']].copy() - - # ํด๋ฐฑ ๋ชจ๋“œ์—์„œ๋Š” ์ž„๊ณ„๊ฐ’ ๊ฒ€์ฆ - similarity_threshold = 0.3 - if analysis_mode == "fallback_similarity_only" and best_result['similarity'] < similarity_threshold: return { "job_id": request.job_id, "schedule_id": 
request.schedule_id, "schedule_his_id": request.schedule_his_id, "keyword": keyword, - "selected_product": None, - "reason": f"์ตœ๊ณ  ์œ ์‚ฌ๋„({best_result['similarity']:.4f}) < ๊ธฐ์ค€({similarity_threshold})", - "status": "success" + "selected_product": selected_product, + "reason": reason, + "status": "success", } - # ์œ ์‚ฌ๋„ ์ •๋ณด ์ถ”๊ฐ€ - selected_product['similarity_info'] = { - 'similarity_score': best_result['similarity'], - 'analysis_type': 'multi_candidate_bert', - 'analysis_mode': analysis_mode, - 'rank': 1, - 'total_candidates': len(candidates) - } - - # ๋งค์นญ ๋ชจ๋“œ์—์„œ๋Š” ์ข…ํ•ฉ ์ ์ˆ˜๋„ ๊ณ„์‚ฐ - if analysis_mode == "matched_products" and 'match_info' in selected_product: - match_score = selected_product['match_info']['match_score'] - similarity_score = best_result['similarity'] - # ๊ฐ€์ค‘์น˜: ๋งค์นญ 40%, ์œ ์‚ฌ๋„ 60% - final_score = match_score * 0.4 + similarity_score * 0.6 - selected_product['final_score'] = final_score - reason = f"์ข…ํ•ฉ์ ์ˆ˜({final_score:.4f}) = ๋งค์นญ({match_score:.4f})*0.4 + ์œ ์‚ฌ๋„({similarity_score:.4f})*0.6" - else: - reason = f"์œ ์‚ฌ๋„({best_result['similarity']:.4f}) ๊ธฐ์ค€ ์„ ํƒ ({analysis_mode})" - - print(f"์„ ํƒ๋จ: {selected_product['title'][:50]} | {reason}") - - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": selected_product, - "reason": reason, - "status": "success" - } - - except Exception as e: - print(f"์œ ์‚ฌ๋„ ๋ถ„์„ ์„œ๋น„์Šค ์˜ค๋ฅ˜: {e}") - raise InvalidItemDataException(f"์œ ์‚ฌ๋„ ๋ถ„์„ ์‹คํŒจ: {str(e)}") \ No newline at end of file + except Exception as e: + logger.error( + f"์œ ์‚ฌ๋„ ๋ถ„์„ ์„œ๋น„์Šค ์˜ค๋ฅ˜: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + ) + raise InvalidItemDataException(f"์œ ์‚ฌ๋„ ๋ถ„์„ ์‹คํŒจ: {str(e)}") diff --git a/apps/pre-processing-service/app/test/test_keyword.py b/apps/pre-processing-service/app/test/test_keyword.py index e0432139..2a96796e 100644 --- a/apps/pre-processing-service/app/test/test_keyword.py +++ b/apps/pre-processing-service/app/test/test_keyword.py @@ -10,17 +10,20 @@ def test_read_root(): - response = client.get("/keyword/") + response = client.get("/keywords/") assert response.status_code == 200 assert response.json() == {"message": "keyword API"} -@pytest.mark.parametrize("tag, category, start_date, end_date", [ - ("naver", "50000000", "2025-09-01", "2025-09-02"), - ("naver", "50000001", "2025-09-01", "2025-09-02"), - ("naver", "50000002", "2025-09-01", "2025-09-02"), - ("naver_store", "", "2025-09-01", "2025-09-02"), -]) +@pytest.mark.parametrize( + "tag, category, start_date, end_date", + [ + ("naver", "50000000", "2025-09-01", "2025-09-02"), + ("naver", "50000001", "2025-09-01", "2025-09-02"), + ("naver", "50000002", "2025-09-01", "2025-09-02"), + ("naver_store", "", "2025-09-01", "2025-09-02"), + ], +) def test_search(tag, category, start_date, end_date): body = { "job_id": JOB_ID, @@ -29,10 +32,10 @@ def test_search(tag, category, start_date, end_date): "tag": tag, "category": category, "start_date": start_date, - "end_date": end_date + "end_date": end_date, } - response = client.post("/keyword/search", json=body) + response = client.post("/keywords/search", json=body) assert response.status_code == 200 response_data = response.json() @@ -41,4 +44,4 @@ def test_search(tag, category, start_date, end_date): assert response_data["schedule_his_id"] == body["schedule_his_id"] # ์˜คํƒ€ ์ˆ˜์ • assert response_data["status"] == "success" 
assert "keyword" in response_data - assert isinstance(response_data["total_keyword"], dict) \ No newline at end of file + assert isinstance(response_data["total_keyword"], dict) diff --git a/apps/pre-processing-service/app/test/test_mariadb_connection.py b/apps/pre-processing-service/app/test/test_mariadb_connection.py new file mode 100644 index 00000000..985d0e08 --- /dev/null +++ b/apps/pre-processing-service/app/test/test_mariadb_connection.py @@ -0,0 +1,148 @@ +import pytest +import threading +from dotenv import load_dotenv + +from app.db.mariadb_manager import MariadbManager + + +class TestMariadbManager: + """ + MariaDB Manager ํ…Œ์ŠคํŠธ + 1. ์‹ฑ๊ธ€ํ†ค ํŒจํ„ด ํ™•์ธ + 2. ํ™˜๊ฒฝ๋ณ€์ˆ˜ ๋กœ๋“œ ํ…Œ์ŠคํŠธ + 3. ์ปค๋„ฅ์…˜ํ’€ ์ดˆ๊ธฐํ™” ํ…Œ์ŠคํŠธ + 4. ์ปค์„œ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ๋ฐ SELECT 1 ํ…Œ์ŠคํŠธ + 5. ์ปค๋„ฅ์…˜ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ๋ฐ SELECT 1 ํ…Œ์ŠคํŠธ + """ + + def setup_method(self): + """๊ฐ ํ…Œ์ŠคํŠธ ๋ฉ”์„œ๋“œ ์‹คํ–‰ ์ „ ์ดˆ๊ธฐํ™”""" + + MariadbManager._instance = None + if hasattr(MariadbManager, "_initialized"): + MariadbManager._initialized = False + + def teardown_method(self): + """๊ฐ ํ…Œ์ŠคํŠธ ๋ฉ”์„œ๋“œ ์‹คํ–‰ ํ›„ ์ •๋ฆฌ""" + + if MariadbManager._instance and hasattr(MariadbManager._instance, "_pool"): + if MariadbManager._instance._pool: + MariadbManager._instance.close_pool() + MariadbManager._instance = None + + def test_singleton_pattern(self): + """์‹ฑ๊ธ€ํ†ค ํŒจํ„ด ํ™•์ธ ํ…Œ์ŠคํŠธ""" + + manager1 = MariadbManager() + manager2 = MariadbManager() + + assert manager1 is manager2, "์‹ฑ๊ธ€ํ†ค ํŒจํ„ด์ด ์ œ๋Œ€๋กœ ์ž‘๋™ํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค" + assert id(manager1) == id(manager2), "์ธ์Šคํ„ด์Šค ID๊ฐ€ ๋‹ค๋ฆ…๋‹ˆ๋‹ค" + + instances = [] + + def create_instance(): + instance = MariadbManager() + instances.append(instance) + + threads = [] + for i in range(5): + thread = threading.Thread(target=create_instance, name=f"Thread-{i}") + threads.append(thread) + thread.start() + + for thread in threads: + thread.join() + + for i, instance in enumerate(instances): + assert instance is manager1, f"์Šค๋ ˆ๋“œ {i}์—์„œ ์ƒ์„ฑ๋œ ์ธ์Šคํ„ด์Šค๊ฐ€ ๋‹ค๋ฆ…๋‹ˆ๋‹ค" + + def test_environment_variables_load(self): + """ํ™˜๊ฒฝ๋ณ€์ˆ˜ ๋กœ๋“œ ํ…Œ์ŠคํŠธ""" + + manager = MariadbManager() + config = manager._config + + required_keys = ["host", "port", "database", "user", "password"] + for key in required_keys: + assert key in config, f"ํ•„์ˆ˜ ์„ค์ • {key}๊ฐ€ ๋ˆ„๋ฝ๋˜์—ˆ์Šต๋‹ˆ๋‹ค" + assert config[key] is not None, f"์„ค์ • {key}์˜ ๊ฐ’์ด None์ž…๋‹ˆ๋‹ค" + if isinstance(config[key], str): + assert config[key].strip() != "", f"์„ค์ • {key}์˜ ๊ฐ’์ด ๋น„์–ด์žˆ์Šต๋‹ˆ๋‹ค" + + assert isinstance(config["port"], int), "ํฌํŠธ๋Š” ์ •์ˆ˜์—ฌ์•ผ ํ•ฉ๋‹ˆ๋‹ค" + assert config["port"] > 0, "ํฌํŠธ๋Š” ์–‘์ˆ˜์—ฌ์•ผ ํ•ฉ๋‹ˆ๋‹ค" + + def test_connection_pool_initialization(self): + """์ปค๋„ฅ์…˜ํ’€ ์ดˆ๊ธฐํ™” ํ…Œ์ŠคํŠธ""" + + manager = MariadbManager() + + assert manager._pool is None, "์ดˆ๊ธฐ ํ’€ ์ƒํƒœ๊ฐ€ None์ด ์•„๋‹™๋‹ˆ๋‹ค" + + try: + manager._init_pool(pool_size=5) + except Exception as e: + pytest.fail(f"์ปค๋„ฅ์…˜ํ’€ ์ดˆ๊ธฐํ™” ์‹คํŒจ: {e}") + + assert manager._pool is not None, "ํ’€์ด ์ƒ์„ฑ๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค" + + try: + conn = manager._pool.connection() + conn.close() + except Exception as e: + pytest.fail(f"ํ’€์—์„œ ์—ฐ๊ฒฐ ํš๋“ ์‹คํŒจ: {e}") + + def test_cursor_context_manager_with_select1(self): + """์ปค์„œ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ๋ฐ SELECT 1 ํ…Œ์ŠคํŠธ""" + + manager = MariadbManager() + + try: + with manager.get_cursor() as cursor: + cursor.execute("SELECT 1") + result = cursor.fetchone() + + assert result is 
not None, "SELECT 1 ๊ฒฐ๊ณผ๊ฐ€ None์ž…๋‹ˆ๋‹ค" + assert result[0] == 1, f"SELECT 1 ๊ฒฐ๊ณผ๊ฐ€ 1์ด ์•„๋‹™๋‹ˆ๋‹ค: {result[0]}" + + cursor.execute("SELECT NOW()") + time_result = cursor.fetchone() + assert time_result is not None, "NOW() ๊ฒฐ๊ณผ๊ฐ€ None์ž…๋‹ˆ๋‹ค" + + cursor.execute("SELECT VERSION()") + version_result = cursor.fetchone() + assert version_result is not None, "VERSION() ๊ฒฐ๊ณผ๊ฐ€ None์ž…๋‹ˆ๋‹ค" + + except Exception as e: + pytest.fail(f"์ปค์„œ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ํ…Œ์ŠคํŠธ ์‹คํŒจ: {e}") + + def test_connection_context_manager_with_select1(self): + """์ปค๋„ฅ์…˜ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ๋ฐ SELECT 1 ํ…Œ์ŠคํŠธ""" + + manager = MariadbManager() + + try: + with manager.get_connection() as conn: + cursor = conn.cursor() + + try: + cursor.execute("SELECT 1") + result = cursor.fetchone() + + assert result is not None, "SELECT 1 ๊ฒฐ๊ณผ๊ฐ€ None์ž…๋‹ˆ๋‹ค" + assert result[0] == 1, f"SELECT 1 ๊ฒฐ๊ณผ๊ฐ€ 1์ด ์•„๋‹™๋‹ˆ๋‹ค: {result[0]}" + + cursor.execute("SELECT CONNECTION_ID()") + conn_info = cursor.fetchone() + assert conn_info is not None, "CONNECTION_ID() ๊ฒฐ๊ณผ๊ฐ€ None์ž…๋‹ˆ๋‹ค" + + cursor.execute("SELECT USER()") + user_info = cursor.fetchone() + assert user_info is not None, "USER() ๊ฒฐ๊ณผ๊ฐ€ None์ž…๋‹ˆ๋‹ค" + + finally: + cursor.close() + + except Exception as e: + pytest.fail(f"์ปค๋„ฅ์…˜ ์ปจํ…์ŠคํŠธ ๋งค๋‹ˆ์ € ํ…Œ์ŠคํŠธ ์‹คํŒจ: {e}") diff --git a/apps/pre-processing-service/app/test/test_match_service.py b/apps/pre-processing-service/app/test/test_match_service.py index 7b80c258..7750cd3d 100644 --- a/apps/pre-processing-service/app/test/test_match_service.py +++ b/apps/pre-processing-service/app/test/test_match_service.py @@ -10,16 +10,16 @@ def test_match_success(): sample_search_results = [ { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", - "title": "925 ์‹ค๋ฒ„ ๋ฐ˜์ง€ ์—ฌ์„ฑ์šฉ ๊ฒฐํ˜ผ๋ฐ˜์ง€" + "title": "925 ์‹ค๋ฒ„ ๋ฐ˜์ง€ ์—ฌ์„ฑ์šฉ ๊ฒฐํ˜ผ๋ฐ˜์ง€", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", - "title": "๊ณจ๋“œ ๋ชฉ๊ฑธ์ด ์ฒด์ธ ํŽœ๋˜ํŠธ" + "title": "๊ณจ๋“œ ๋ชฉ๊ฑธ์ด ์ฒด์ธ ํŽœ๋˜ํŠธ", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=789", - "title": "๋ฐ˜์ง€ ์„ธํŠธ ์ปคํ”Œ๋ง ์•ฝํ˜ผ๋ฐ˜์ง€" - } + "title": "๋ฐ˜์ง€ ์„ธํŠธ ์ปคํ”Œ๋ง ์•ฝํ˜ผ๋ฐ˜์ง€", + }, ] body = { @@ -27,10 +27,10 @@ def test_match_success(): "schedule_id": 1, "schedule_his_id": 1, "keyword": "๋ฐ˜์ง€", - "search_results": sample_search_results + "search_results": sample_search_results, } - response = client.post("/product/match", json=body) + response = client.post("/products/match", json=body) print(f"Match Response: {response.json()}") assert response.status_code == 200 @@ -55,10 +55,10 @@ def test_match_no_results(): "schedule_id": 2, "schedule_his_id": 2, "keyword": "๋ฐ˜์ง€", - "search_results": [] + "search_results": [], } - response = client.post("/product/match", json=body) + response = client.post("/products/match", json=body) print(f"No results response: {response.json()}") assert response.status_code == 200 @@ -71,12 +71,12 @@ def test_match_no_matches(): sample_search_results = [ { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", - "title": "์ปดํ“จํ„ฐ ํ‚ค๋ณด๋“œ ๊ฒŒ์ด๋ฐ" + "title": "์ปดํ“จํ„ฐ ํ‚ค๋ณด๋“œ ๊ฒŒ์ด๋ฐ", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", - "title": "์Šค๋งˆํŠธํฐ ์ผ€์ด์Šค ํˆฌ๋ช…" - } + "title": "์Šค๋งˆํŠธํฐ ์ผ€์ด์Šค ํˆฌ๋ช…", + }, ] body = { @@ -84,14 +84,14 @@ def test_match_no_matches(): "schedule_id": 3, "schedule_his_id": 3, "keyword": "๋ฐ˜์ง€", - "search_results": 
sample_search_results + "search_results": sample_search_results, } - response = client.post("/product/match", json=body) + response = client.post("/products/match", json=body) print(f"No matches response: {response.json()}") assert response.status_code == 200 data = response.json() # ๋งค์นญ๋˜์ง€ ์•Š์•„๋„ ์„ฑ๊ณต์œผ๋กœ ์ฒ˜๋ฆฌ assert data["status"] == "success" - assert isinstance(data["matched_products"], list) \ No newline at end of file + assert isinstance(data["matched_products"], list) diff --git a/apps/pre-processing-service/app/test/test_sadagu_crawl.py b/apps/pre-processing-service/app/test/test_sadagu_crawl.py index d034be43..6c6ad84a 100644 --- a/apps/pre-processing-service/app/test/test_sadagu_crawl.py +++ b/apps/pre-processing-service/app/test/test_sadagu_crawl.py @@ -13,10 +13,10 @@ def test_crawl_success(): "tag": "detail", "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", "use_selenium": False, - "include_images": False + "include_images": False, } - response = client.post("/product/crawl", json=body) + response = client.post("/products/crawl", json=body) print(f"Response: {response.json()}") assert response.status_code == 200 @@ -27,62 +27,62 @@ def test_crawl_success(): assert "product_detail" in data -def test_crawl_invalid_url(): - """์ž˜๋ชป๋œ URL์ด์ง€๋งŒ ํŽ˜์ด์ง€๋Š” ์กด์žฌํ•˜๋Š” ๊ฒฝ์šฐ""" - body = { - "job_id": 2, - "schedule_id": 2, - "schedule_his_id": 2, - "tag": "detail", - "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=invalid", - "use_selenium": False, - "include_images": False - } - - response = client.post("/product/crawl", json=body) - print(f"Response: {response.json()}") - - assert response.status_code == 200 - data = response.json() - - product_detail = data.get("product_detail", {}) - assert product_detail.get("title") in ["์ œ๋ชฉ ์—†์Œ", "์ œ๋ชฉ ์ถ”์ถœ ์‹คํŒจ", None] - assert product_detail.get("price", 0) == 0 - - -def test_crawl_completely_invalid_url(): - """์™„์ „ํžˆ ์กด์žฌํ•˜์ง€ ์•Š๋Š” ๋„๋ฉ”์ธ""" - body = { - "job_id": 3, - "schedule_id": 3, - "schedule_his_id": 3, - "tag": "detail", - "product_url": "https://nonexistent-domain-12345.com/invalid", - "use_selenium": False, - "include_images": False - } - - response = client.post("/product/crawl", json=body) - print(f"Response: {response.json()}") - - assert response.status_code in (400, 422, 500) - - -def test_crawl_include_images(): - body = { - "job_id": 4, - "schedule_id": 4, - "schedule_his_id": 4, - "tag": "detail", - "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", - "use_selenium": False, - "include_images": True - } - - response = client.post("/product/crawl", json=body) - print(f"Response: {response.json()}") - - assert response.status_code == 200 - data = response.json() - assert data["include_images"] is True - assert isinstance(data["product_detail"].get("product_images"), list) \ No newline at end of file +# def test_crawl_invalid_url(): +# """์ž˜๋ชป๋œ URL์ด์ง€๋งŒ ํŽ˜์ด์ง€๋Š” ์กด์žฌํ•˜๋Š” ๊ฒฝ์šฐ""" +# body = { +# "job_id": 2, +# "schedule_id": 2, +# "schedule_his_id": 2, +# "tag": "detail", +# "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=invalid", +# "use_selenium": False, +# "include_images": False, +# } +# +# response = client.post("/products/crawl", json=body) +# print(f"Response: {response.json()}") +# +# assert response.status_code == 200 +# data = response.json() +# +# product_detail = data.get("product_detail", {}) +# assert product_detail.get("title") 
in ["์ œ๋ชฉ ์—†์Œ", "์ œ๋ชฉ ์ถ”์ถœ ์‹คํŒจ", None] +# assert product_detail.get("price", 0) == 0 + + +# def test_crawl_completely_invalid_url(): +# """์™„์ „ํžˆ ์กด์žฌํ•˜์ง€ ์•Š๋Š” ๋„๋ฉ”์ธ""" +# body = { +# "job_id": 3, +# "schedule_id": 3, +# "schedule_his_id": 3, +# "tag": "detail", +# "product_url": "https://nonexistent-domain-12345.com/invalid", +# "use_selenium": False, +# "include_images": False, +# } +# +# response = client.post("/products/crawl", json=body) +# print(f"Response: {response.json()}") +# +# assert response.status_code in (400, 422, 500) + + +# def test_crawl_include_images(): +# body = { +# "job_id": 4, +# "schedule_id": 4, +# "schedule_his_id": 4, +# "tag": "detail", +# "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", +# "use_selenium": False, +# "include_images": True, +# } +# +# response = client.post("/products/crawl", json=body) +# print(f"Response: {response.json()}") +# +# assert response.status_code == 200 +# data = response.json() +# assert data["include_images"] is True +# assert isinstance(data["product_detail"].get("product_images"), list) diff --git a/apps/pre-processing-service/app/test/test_search_service.py b/apps/pre-processing-service/app/test/test_search_service.py index 6dd415e0..fc64c9cd 100644 --- a/apps/pre-processing-service/app/test/test_search_service.py +++ b/apps/pre-processing-service/app/test/test_search_service.py @@ -7,14 +7,9 @@ def test_search_success(): """์ƒํ’ˆ ๊ฒ€์ƒ‰ ์„ฑ๊ณต ํ…Œ์ŠคํŠธ""" - body = { - "job_id": 1, - "schedule_id": 1, - "schedule_his_id": 1, - "keyword": "๋ฐ˜์ง€" - } + body = {"job_id": 1, "schedule_id": 1, "schedule_his_id": 1, "keyword": "๋ฐ˜์ง€"} - response = client.post("/product/search", json=body) + response = client.post("/products/search", json=body) print(f"Search Response: {response.json()}") assert response.status_code == 200 @@ -27,14 +22,9 @@ def test_search_success(): def test_search_empty_keyword(): """๋นˆ ํ‚ค์›Œ๋“œ ๊ฒ€์ƒ‰ ํ…Œ์ŠคํŠธ""" - body = { - "job_id": 2, - "schedule_id": 2, - "schedule_his_id": 2, - "keyword": "" - } + body = {"job_id": 2, "schedule_id": 2, "schedule_his_id": 2, "keyword": ""} - response = client.post("/product/search", json=body) + response = client.post("/products/search", json=body) print(f"Empty keyword response: {response.json()}") # ๋นˆ ํ‚ค์›Œ๋“œ๋ผ๋„ ์—๋Ÿฌ๊ฐ€ ์•„๋‹Œ ๋นˆ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ˜ํ™˜ํ•ด์•ผ ํ•จ @@ -49,14 +39,14 @@ def test_search_nonexistent_keyword(): "job_id": 3, "schedule_id": 3, "schedule_his_id": 3, - "keyword": "zxcvbnmasdfghjklqwertyuiop123456789" + "keyword": "zxcvbnmasdfghjklqwertyuiop123456789", } - response = client.post("/product/search", json=body) + response = client.post("/products/search", json=body) print(f"Nonexistent keyword response: {response.json()}") assert response.status_code == 200 data = response.json() # ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์–ด๋„ ์„ฑ๊ณต์œผ๋กœ ์ฒ˜๋ฆฌ assert data["status"] == "success" - assert isinstance(data["search_results"], list) \ No newline at end of file + assert isinstance(data["search_results"], list) diff --git a/apps/pre-processing-service/app/test/test_similarity_service.py b/apps/pre-processing-service/app/test/test_similarity_service.py index 1888b873..cb84d3c3 100644 --- a/apps/pre-processing-service/app/test/test_similarity_service.py +++ b/apps/pre-processing-service/app/test/test_similarity_service.py @@ -14,8 +14,8 @@ def test_similarity_with_matched_products(): "match_info": { "match_type": "exact", "match_score": 1.0, - "match_reason": "์™„์ „ ๋งค์นญ" - } + "match_reason": "์™„์ „ 
๋งค์นญ", + }, }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", @@ -23,9 +23,9 @@ def test_similarity_with_matched_products(): "match_info": { "match_type": "morphological", "match_score": 0.8, - "match_reason": "ํ˜•ํƒœ์†Œ ๋งค์นญ" - } - } + "match_reason": "ํ˜•ํƒœ์†Œ ๋งค์นญ", + }, + }, ] body = { @@ -33,10 +33,10 @@ def test_similarity_with_matched_products(): "schedule_id": 1, "schedule_his_id": 1, "keyword": "๋ฐ˜์ง€", - "matched_products": matched_products + "matched_products": matched_products, } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"Similarity Response: {response.json()}") assert response.status_code == 200 @@ -56,12 +56,12 @@ def test_similarity_fallback_to_search_results(): search_results = [ { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", - "title": "์‹ค๋ฒ„ ๋ง ์•…์„ธ์„œ๋ฆฌ" + "title": "์‹ค๋ฒ„ ๋ง ์•…์„ธ์„œ๋ฆฌ", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", - "title": "๊ณจ๋“œ ๋ฐ˜์ง€ ์—ฌ์„ฑ" - } + "title": "๊ณจ๋“œ ๋ฐ˜์ง€ ์—ฌ์„ฑ", + }, ] body = { @@ -70,10 +70,10 @@ def test_similarity_fallback_to_search_results(): "schedule_his_id": 2, "keyword": "๋ฐ˜์ง€", "matched_products": [], # ๋งค์นญ๋œ ์ƒํ’ˆ ์—†์Œ - "search_results": search_results # ํด๋ฐฑ์šฉ + "search_results": search_results, # ํด๋ฐฑ์šฉ } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"Fallback Response: {response.json()}") assert response.status_code == 200 @@ -83,7 +83,10 @@ def test_similarity_fallback_to_search_results(): # ํด๋ฐฑ ๋ชจ๋“œ์—์„œ๋Š” ์ž„๊ณ„๊ฐ’์„ ํ†ต๊ณผํ•œ ๊ฒฝ์šฐ์—๋งŒ ์ƒํ’ˆ์ด ์„ ํƒ๋จ if data["selected_product"]: assert "similarity_info" in data["selected_product"] - assert data["selected_product"]["similarity_info"]["analysis_mode"] == "fallback_similarity_only" + assert ( + data["selected_product"]["similarity_info"]["analysis_mode"] + == "fallback_similarity_only" + ) def test_similarity_single_candidate(): @@ -92,10 +95,7 @@ def test_similarity_single_candidate(): { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", "title": "925 ์‹ค๋ฒ„ ๋ฐ˜์ง€ ์—ฌ์„ฑ์šฉ", - "match_info": { - "match_type": "exact", - "match_score": 1.0 - } + "match_info": {"match_type": "exact", "match_score": 1.0}, } ] @@ -104,16 +104,19 @@ def test_similarity_single_candidate(): "schedule_id": 3, "schedule_his_id": 3, "keyword": "๋ฐ˜์ง€", - "matched_products": single_product + "matched_products": single_product, } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"Single candidate response: {response.json()}") assert response.status_code == 200 data = response.json() assert data["selected_product"] is not None - assert data["selected_product"]["similarity_info"]["analysis_type"] == "single_candidate" + assert ( + data["selected_product"]["similarity_info"]["analysis_type"] + == "single_candidate" + ) def test_similarity_no_candidates(): @@ -124,13 +127,13 @@ def test_similarity_no_candidates(): "schedule_his_id": 4, "keyword": "๋ฐ˜์ง€", "matched_products": [], - "search_results": [] + "search_results": [], } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"No candidates response: {response.json()}") assert response.status_code == 200 data = response.json() assert data["selected_product"] is None - assert "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ 
๋ชจ๋‘ ์—†์Œ" in data["reason"] \ No newline at end of file + assert "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ๋ชจ๋‘ ์—†์Œ" in data["reason"] diff --git a/apps/pre-processing-service/app/utils/crawler_utils.py b/apps/pre-processing-service/app/utils/crawler_utils.py index 8246788a..5c593b9f 100644 --- a/apps/pre-processing-service/app/utils/crawler_utils.py +++ b/apps/pre-processing-service/app/utils/crawler_utils.py @@ -8,6 +8,7 @@ from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.common.exceptions import TimeoutException, NoSuchElementException +from loguru import logger class SearchCrawler: @@ -23,21 +24,21 @@ def __init__(self, use_selenium=True): def _setup_selenium(self): """Selenium WebDriver ์ดˆ๊ธฐํ™”""" chrome_options = Options() - chrome_options.add_argument('--headless') - chrome_options.add_argument('--no-sandbox') - chrome_options.add_argument('--disable-dev-shm-usage') - chrome_options.add_argument('--disable-gpu') - chrome_options.add_argument('--window-size=1920,1080') + chrome_options.add_argument("--headless") + chrome_options.add_argument("--no-sandbox") + chrome_options.add_argument("--disable-dev-shm-usage") + chrome_options.add_argument("--disable-gpu") + chrome_options.add_argument("--window-size=1920,1080") chrome_options.add_argument( - '--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' + "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" ) try: self.driver = webdriver.Chrome(options=chrome_options) self.wait = WebDriverWait(self.driver, 10) - print("Selenium WebDriver ์ดˆ๊ธฐํ™” ์™„๋ฃŒ") + logger.info("Selenium WebDriver ์ดˆ๊ธฐํ™” ์™„๋ฃŒ") except Exception as e: - print(f"Selenium ์ดˆ๊ธฐํ™” ์‹คํŒจ, httpx๋กœ ๋Œ€์ฒด: {e}") + logger.warning(f"Selenium ์ดˆ๊ธฐํ™” ์‹คํŒจ, httpx๋กœ ๋Œ€์ฒด: {e}") self.use_selenium = False self._setup_httpx() @@ -45,10 +46,11 @@ def _setup_httpx(self): """httpx ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™”""" self.client = httpx.AsyncClient( headers={ - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" }, - timeout=30.0 + timeout=30.0, ) + logger.info("httpx ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™” ์™„๋ฃŒ") async def search_products_selenium(self, keyword: str) -> list[dict]: """Selenium์„ ์‚ฌ์šฉํ•œ ์ƒํ’ˆ ๊ฒ€์ƒ‰""" @@ -56,6 +58,9 @@ async def search_products_selenium(self, keyword: str) -> list[dict]: search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" try: + logger.info( + f"Selenium ์ƒํ’ˆ ๊ฒ€์ƒ‰ ์‹œ์ž‘: keyword='{keyword}', url='{search_url}'" + ) self.driver.get(search_url) time.sleep(5) @@ -63,34 +68,34 @@ async def search_products_selenium(self, keyword: str) -> list[dict]: link_elements = self.driver.find_elements(By.TAG_NAME, "a") for element in link_elements: - href = element.get_attribute('href') - if href and 'view.php' in href and ('platform=1688' in href or 'num_iid' in href): + href = element.get_attribute("href") + if ( + href + and "view.php" in href + and ("platform=1688" in href or "num_iid" in href) + ): try: - title = element.get_attribute('title') or element.text.strip() + title = element.get_attribute("title") or element.text.strip() if title: - product_links.append({ - 'url': href, - 'title': title - }) + product_links.append({"url": 
href, "title": title}) except: - product_links.append({ - 'url': href, - 'title': 'Unknown Title' - }) + product_links.append({"url": href, "title": "Unknown Title"}) # ์ค‘๋ณต ์ œ๊ฑฐ seen_urls = set() unique_products = [] for product in product_links: - if product['url'] not in seen_urls: - seen_urls.add(product['url']) + if product["url"] not in seen_urls: + seen_urls.add(product["url"]) unique_products.append(product) - print(f"Selenium์œผ๋กœ ๋ฐœ๊ฒฌํ•œ ์ƒํ’ˆ ๋งํฌ: {len(unique_products)}๊ฐœ") + logger.info( + f"Selenium์œผ๋กœ ๋ฐœ๊ฒฌํ•œ ์ƒํ’ˆ ๋งํฌ: {len(unique_products)}๊ฐœ (์ค‘๋ณต ์ œ๊ฑฐ ์ „: {len(product_links)}๊ฐœ)" + ) return unique_products[:20] except Exception as e: - print(f"Selenium ๊ฒ€์ƒ‰ ์˜ค๋ฅ˜: {e}") + logger.error(f"Selenium ๊ฒ€์ƒ‰ ์˜ค๋ฅ˜: keyword='{keyword}', error='{e}'") return [] async def search_products_httpx(self, keyword: str) -> list[dict]: @@ -99,75 +104,94 @@ async def search_products_httpx(self, keyword: str) -> list[dict]: search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" try: + logger.info( + f"httpx ์ƒํ’ˆ ๊ฒ€์ƒ‰ ์‹œ์ž‘: keyword='{keyword}', url='{search_url}'" + ) response = await self.client.get(search_url) response.raise_for_status() - soup = BeautifulSoup(response.content, 'html.parser') + soup = BeautifulSoup(response.content, "html.parser") product_links = [] - all_links = soup.find_all('a', href=True) + all_links = soup.find_all("a", href=True) for link in all_links: - href = link['href'] - if 'view.php' in href and ('platform=1688' in href or 'num_iid' in href): - full_url = f"{self.base_url}{href}" if href.startswith('/') else href - title = link.get('title', '') or link.get_text(strip=True) or 'Unknown Title' - - product_links.append({ - 'url': full_url, - 'title': title - }) - - print(f"httpx๋กœ ๋ฐœ๊ฒฌํ•œ ์ƒํ’ˆ ๋งํฌ: {len(product_links)}๊ฐœ") + href = link["href"] + if "view.php" in href and ( + "platform=1688" in href or "num_iid" in href + ): + full_url = ( + f"{self.base_url}{href}" if href.startswith("/") else href + ) + title = ( + link.get("title", "") + or link.get_text(strip=True) + or "Unknown Title" + ) + + product_links.append({"url": full_url, "title": title}) + + logger.info(f"httpx๋กœ ๋ฐœ๊ฒฌํ•œ ์ƒํ’ˆ ๋งํฌ: {len(product_links)}๊ฐœ") return product_links[:20] except Exception as e: - print(f"httpx ๊ฒ€์ƒ‰ ์˜ค๋ฅ˜: {e}") + logger.error(f"httpx ๊ฒ€์ƒ‰ ์˜ค๋ฅ˜: keyword='{keyword}', error='{e}'") return [] async def get_basic_product_info(self, product_url: str) -> dict: """๊ธฐ๋ณธ ์ƒํ’ˆ ์ •๋ณด๋งŒ ํฌ๋กค๋ง""" try: + logger.debug(f"๊ธฐ๋ณธ ์ƒํ’ˆ ์ •๋ณด ํฌ๋กค๋ง ์‹œ์ž‘: url='{product_url}'") + if self.use_selenium: self.driver.get(product_url) - self.wait.until(lambda driver: driver.execute_script("return document.readyState") == "complete") - soup = BeautifulSoup(self.driver.page_source, 'html.parser') + self.wait.until( + lambda driver: driver.execute_script("return document.readyState") + == "complete" + ) + soup = BeautifulSoup(self.driver.page_source, "html.parser") else: response = await self.client.get(product_url) response.raise_for_status() - soup = BeautifulSoup(response.content, 'html.parser') + soup = BeautifulSoup(response.content, "html.parser") - title_element = soup.find('h1', {'id': 'kakaotitle'}) + title_element = soup.find("h1", {"id": "kakaotitle"}) title = title_element.get_text(strip=True) if title_element else "์ œ๋ชฉ ์—†์Œ" - return { - 'url': product_url, - 'title': title - } + logger.debug(f"๊ธฐ๋ณธ ์ƒํ’ˆ ์ •๋ณด ํฌ๋กค๋ง ์™„๋ฃŒ: title='{title[:50]}'") + return {"url": product_url, 
"title": title} except Exception as e: - print(f"๊ธฐ๋ณธ ์ƒํ’ˆ ํฌ๋กค๋ง ์˜ค๋ฅ˜ ({product_url}): {e}") + logger.error(f"๊ธฐ๋ณธ ์ƒํ’ˆ ํฌ๋กค๋ง ์˜ค๋ฅ˜: url='{product_url}', error='{e}'") return None async def close(self): """๋ฆฌ์†Œ์Šค ์ •๋ฆฌ""" - if self.use_selenium and hasattr(self, 'driver'): + if self.use_selenium and hasattr(self, "driver"): try: self.driver.quit() - except Exception: - pass - elif hasattr(self, 'client'): + logger.info("Selenium WebDriver ์ข…๋ฃŒ ์™„๋ฃŒ") + except Exception as e: + logger.warning(f"Selenium WebDriver ์ข…๋ฃŒ ์ค‘ ์˜ค๋ฅ˜: {e}") + elif hasattr(self, "client"): try: await self.client.aclose() - except Exception: - pass + logger.info("httpx ํด๋ผ์ด์–ธํŠธ ์ข…๋ฃŒ ์™„๋ฃŒ") + except Exception as e: + logger.warning(f"httpx ํด๋ผ์ด์–ธํŠธ ์ข…๋ฃŒ ์ค‘ ์˜ค๋ฅ˜: {e}") class DetailCrawler(SearchCrawler): """SearchCrawler๋ฅผ ํ™•์žฅํ•œ ์ƒ์„ธ ํฌ๋กค๋ง ํด๋ž˜์Šค""" - async def crawl_detail(self, product_url: str, include_images: bool = False) -> dict: + async def crawl_detail( + self, product_url: str, include_images: bool = False + ) -> dict: """์ƒํ’ˆ ์ƒ์„ธ ์ •๋ณด ํฌ๋กค๋ง""" try: + logger.info( + f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์‹œ์ž‘: url='{product_url}', include_images={include_images}" + ) + if self.use_selenium: soup = await self._get_soup_selenium(product_url) else: @@ -181,160 +205,203 @@ async def crawl_detail(self, product_url: str, include_images: bool = False) -> material_info = self._extract_material_info(soup) product_data = { - 'url': product_url, - 'title': title, - 'price': price, - 'rating': rating, - 'options': options, - 'material_info': material_info, - 'crawled_at': time.strftime('%Y-%m-%d %H:%M:%S') + "url": product_url, + "title": title, + "price": price, + "rating": rating, + "options": options, + "material_info": material_info, + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), } + logger.info( + f"๊ธฐ๋ณธ ์ƒํ’ˆ ์ •๋ณด ์ถ”์ถœ ์™„๋ฃŒ: title='{title[:50]}', price={price}, rating={rating}, options_count={len(options)}" + ) + if include_images: - print("์ด๋ฏธ์ง€ ์ •๋ณด ์ถ”์ถœ ์ค‘...") + logger.info("์ด๋ฏธ์ง€ ์ •๋ณด ์ถ”์ถœ ์ค‘...") product_images = self._extract_images(soup) - product_data['product_images'] = [{'original_url': img_url} for img_url in product_images] - print(f"์ถ”์ถœ๋œ ์ด๋ฏธ์ง€: {len(product_images)}๊ฐœ") + product_data["product_images"] = [ + {"original_url": img_url} for img_url in product_images + ] + logger.info(f"์ถ”์ถœ๋œ ์ด๋ฏธ์ง€: {len(product_images)}๊ฐœ") else: - product_data['product_images'] = [] + product_data["product_images"] = [] + logger.info(f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์™„๋ฃŒ: url='{product_url}'") return product_data except Exception as e: - print(f"ํฌ๋กค๋ง ์˜ค๋ฅ˜: {e}") + logger.error(f"์ƒํ’ˆ ์ƒ์„ธ ํฌ๋กค๋ง ์˜ค๋ฅ˜: url='{product_url}', error='{e}'") raise Exception(f"ํฌ๋กค๋ง ์‹คํŒจ: {str(e)}") async def _get_soup_selenium(self, product_url: str) -> BeautifulSoup: """Selenium์œผ๋กœ HTML ๊ฐ€์ ธ์˜ค๊ธฐ""" try: + logger.debug(f"Selenium HTML ๋กœ๋”ฉ ์‹œ์ž‘: url='{product_url}'") self.driver.get(product_url) - self.wait.until(lambda driver: driver.execute_script("return document.readyState") == "complete") + self.wait.until( + lambda driver: driver.execute_script("return document.readyState") + == "complete" + ) time.sleep(2) - return BeautifulSoup(self.driver.page_source, 'html.parser') + logger.debug("Selenium HTML ๋กœ๋”ฉ ์™„๋ฃŒ") + return BeautifulSoup(self.driver.page_source, "html.parser") except Exception as e: + logger.error(f"Selenium HTML ๋กœ๋”ฉ ์‹คํŒจ: url='{product_url}', error='{e}'") raise 
Exception(f"Selenium HTML ๋กœ๋”ฉ ์‹คํŒจ: {e}") async def _get_soup_httpx(self, product_url: str) -> BeautifulSoup: """httpx๋กœ HTML ๊ฐ€์ ธ์˜ค๊ธฐ""" try: + logger.debug(f"httpx HTML ์š”์ฒญ ์‹œ์ž‘: url='{product_url}'") response = await self.client.get(product_url) response.raise_for_status() - return BeautifulSoup(response.content, 'html.parser') + logger.debug("httpx HTML ์š”์ฒญ ์™„๋ฃŒ") + return BeautifulSoup(response.content, "html.parser") except Exception as e: + logger.error(f"httpx HTML ์š”์ฒญ ์‹คํŒจ: url='{product_url}', error='{e}'") raise Exception(f"HTTP ์š”์ฒญ ์‹คํŒจ: {e}") def _extract_title(self, soup: BeautifulSoup) -> str: """์ œ๋ชฉ ์ถ”์ถœ""" - title_element = soup.find('h1', {'id': 'kakaotitle'}) - return title_element.get_text(strip=True) if title_element else "์ œ๋ชฉ ์—†์Œ" + title_element = soup.find("h1", {"id": "kakaotitle"}) + title = title_element.get_text(strip=True) if title_element else "์ œ๋ชฉ ์—†์Œ" + logger.debug(f"์ œ๋ชฉ ์ถ”์ถœ: '{title[:50]}'") + return title def _extract_price(self, soup: BeautifulSoup) -> int: """๊ฐ€๊ฒฉ ์ถ”์ถœ""" price = 0 price_selectors = [ - 'span.price.gsItemPriceKWR', - '.pdt_price span.price', - 'span.price', - '.price' + "span.price.gsItemPriceKWR", + ".pdt_price span.price", + "span.price", + ".price", ] for selector in price_selectors: price_element = soup.select_one(selector) if price_element: - price_text = price_element.get_text(strip=True).replace(',', '').replace('์›', '') - price_match = re.search(r'(\d+)', price_text) + price_text = ( + price_element.get_text(strip=True) + .replace(",", "") + .replace("์›", "") + ) + price_match = re.search(r"(\d+)", price_text) if price_match: price = int(price_match.group(1)) + logger.debug(f"๊ฐ€๊ฒฉ ์ถ”์ถœ ์„ฑ๊ณต: {price}์› (selector: {selector})") break + + if price == 0: + logger.debug("๊ฐ€๊ฒฉ ์ถ”์ถœ ์‹คํŒจ - 0์›์œผ๋กœ ์„ค์ •") + return price def _extract_rating(self, soup: BeautifulSoup) -> float: """ํ‰์  ์ถ”์ถœ""" rating = 0.0 star_containers = [ - soup.find('a', class_='start'), - soup.find('div', class_=re.compile(r'star|rating')), - soup.find('a', href='#reviews_wrap') + soup.find("a", class_="start"), + soup.find("div", class_=re.compile(r"star|rating")), + soup.find("a", href="#reviews_wrap"), ] for container in star_containers: if container: - star_imgs = container.find_all('img') + star_imgs = container.find_all("img") for img in star_imgs: - src = img.get('src', '') - if 'icon_star.svg' in src: + src = img.get("src", "") + if "icon_star.svg" in src: rating += 1 - elif 'icon_star_half.svg' in src: + elif "icon_star_half.svg" in src: rating += 0.5 - break + if rating > 0: + logger.debug(f"ํ‰์  ์ถ”์ถœ ์„ฑ๊ณต: {rating}์ ") + break + + if rating == 0.0: + logger.debug("ํ‰์  ์ถ”์ถœ ์‹คํŒจ - 0.0์ ์œผ๋กœ ์„ค์ •") + return rating def _extract_options(self, soup: BeautifulSoup) -> list[dict]: """์ƒํ’ˆ ์˜ต์…˜ ์ถ”์ถœ""" options = [] - sku_list = soup.find('ul', {'id': 'skubox'}) + sku_list = soup.find("ul", {"id": "skubox"}) if sku_list: - option_items = sku_list.find_all('li', class_=re.compile(r'imgWrapper')) + option_items = sku_list.find_all("li", class_=re.compile(r"imgWrapper")) + logger.debug(f"์˜ต์…˜ ํ•ญ๋ชฉ ๋ฐœ๊ฒฌ: {len(option_items)}๊ฐœ") + for item in option_items: - title_element = item.find('a', title=True) + title_element = item.find("a", title=True) if title_element: - option_name = title_element.get('title', '').strip() + option_name = title_element.get("title", "").strip() # ์žฌ๊ณ  ์ •๋ณด ์ถ”์ถœ stock = 0 item_text = item.get_text() - stock_match = 
re.search(r'์žฌ๊ณ \s*:\s*(\d+)', item_text) + stock_match = re.search(r"์žฌ๊ณ \s*:\s*(\d+)", item_text) if stock_match: stock = int(stock_match.group(1)) # ์ด๋ฏธ์ง€ URL ์ถ”์ถœ - img_element = item.find('img', class_='colorSpec_hashPic') + img_element = item.find("img", class_="colorSpec_hashPic") image_url = "" - if img_element and img_element.get('src'): - image_url = img_element['src'] + if img_element and img_element.get("src"): + image_url = img_element["src"] if option_name: - options.append({ - 'name': option_name, - 'stock': stock, - 'image_url': image_url - }) - + options.append( + { + "name": option_name, + "stock": stock, + "image_url": image_url, + } + ) + logger.debug(f"์˜ต์…˜ ์ถ”์ถœ: name='{option_name}', stock={stock}") + + logger.info(f"์ด {len(options)}๊ฐœ ์˜ต์…˜ ์ถ”์ถœ ์™„๋ฃŒ") return options def _extract_material_info(self, soup: BeautifulSoup) -> dict: """์†Œ์žฌ ์ •๋ณด ์ถ”์ถœ""" material_info = {} - info_items = soup.find_all('div', class_='pro-info-item') + info_items = soup.find_all("div", class_="pro-info-item") for item in info_items: - title_element = item.find('div', class_='pro-info-title') - info_element = item.find('div', class_='pro-info-info') + title_element = item.find("div", class_="pro-info-title") + info_element = item.find("div", class_="pro-info-info") if title_element and info_element: title = title_element.get_text(strip=True) info = info_element.get_text(strip=True) material_info[title] = info + logger.debug(f"์†Œ์žฌ ์ •๋ณด ์ถ”์ถœ: {title}='{info}'") + logger.info(f"์ด {len(material_info)}๊ฐœ ์†Œ์žฌ ์ •๋ณด ์ถ”์ถœ ์™„๋ฃŒ") return material_info def _extract_images(self, soup: BeautifulSoup) -> list[str]: """์ƒํ’ˆ ์ด๋ฏธ์ง€ ์ถ”์ถœ""" images = [] - img_elements = soup.find_all('img', {'id': re.compile(r'img_translate_\d+')}) + img_elements = soup.find_all("img", {"id": re.compile(r"img_translate_\d+")}) for img in img_elements: - src = img.get('src', '') + src = img.get("src", "") if src: - if src.startswith('//'): - src = 'https:' + src - elif src.startswith('/'): + if src.startswith("//"): + src = "https:" + src + elif src.startswith("/"): src = self.base_url + src - elif src.startswith('http'): + elif src.startswith("http"): pass else: continue images.append(src) + logger.debug(f"์ด๋ฏธ์ง€ URL ์ถ”์ถœ: {src}") - return images \ No newline at end of file + logger.info(f"์ด {len(images)}๊ฐœ ์ด๋ฏธ์ง€ URL ์ถ”์ถœ ์™„๋ฃŒ") + return images diff --git a/apps/pre-processing-service/app/utils/crawling_util.py b/apps/pre-processing-service/app/utils/crawling_util.py index 8b0f1501..8ec47518 100644 --- a/apps/pre-processing-service/app/utils/crawling_util.py +++ b/apps/pre-processing-service/app/utils/crawling_util.py @@ -2,6 +2,7 @@ from selenium.webdriver.chrome.options import Options from selenium.webdriver.support.ui import WebDriverWait + class CrawlingUtil: def __init__(self): @@ -20,14 +21,16 @@ def _get_chrome_options(self): options = Options() - options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36') + options.add_argument( + "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36" + ) # options.add_argument('--headless') ๋ฐฑ๊ทธ๋ผ์šด๋“œ ์‹คํ–‰์‹œ ์ฃผ์„ ํ•ด์ œ options.add_argument("--no-sandbox") options.add_argument("--disable-dev-shm-usage") options.add_argument("--disable-gpu") options.add_argument("--disable-extensions") options.add_experimental_option("excludeSwitches", 
["enable-automation"]) - options.add_experimental_option('useAutomationExtension', False) + options.add_experimental_option("useAutomationExtension", False) options.add_argument("--disable-blink-features=AutomationControlled") return options diff --git a/apps/pre-processing-service/app/utils/keyword_matcher.py b/apps/pre-processing-service/app/utils/keyword_matcher.py index 8fab2730..e9ae48ac 100644 --- a/apps/pre-processing-service/app/utils/keyword_matcher.py +++ b/apps/pre-processing-service/app/utils/keyword_matcher.py @@ -1,12 +1,15 @@ from app.core.config import settings # pydantic_settings ๊ธฐ๋ฐ˜ +from loguru import logger try: import MeCab - print("MeCab ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋กœ๋”ฉ ์„ฑ๊ณต") + logger.info("MeCab ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋กœ๋”ฉ ์„ฑ๊ณต") MECAB_AVAILABLE = True except ImportError: - print("MeCab ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. pip install mecab-python3 ๋ฅผ ์‹คํ–‰ํ•ด์ฃผ์„ธ์š”.") + logger.warning( + "MeCab ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. pip install mecab-python3 ๋ฅผ ์‹คํ–‰ํ•ด์ฃผ์„ธ์š”." + ) MeCab = None MECAB_AVAILABLE = False @@ -30,49 +33,68 @@ def __init__(self): test_result = self.mecab.parse("ํ…Œ์ŠคํŠธ") if test_result and test_result.strip(): self.konlpy_available = True - print(f"MeCab ํ˜•ํƒœ์†Œ ๋ถ„์„๊ธฐ ์‚ฌ์šฉ ๊ฐ€๋Šฅ (๊ฒฝ๋กœ: {settings.mecab_path or '๊ธฐ๋ณธ'})") + logger.info( + f"MeCab ํ˜•ํƒœ์†Œ ๋ถ„์„๊ธฐ ์‚ฌ์šฉ ๊ฐ€๋Šฅ (๊ฒฝ๋กœ: {settings.mecab_path or '๊ธฐ๋ณธ'})" + ) else: - print("MeCab ํ…Œ์ŠคํŠธ ์‹คํŒจ") + logger.warning("MeCab ํ…Œ์ŠคํŠธ ์‹คํŒจ") except Exception as e: - print(f"MeCab ์‚ฌ์šฉ ๋ถˆ๊ฐ€ (๊ทœ์น™ ๊ธฐ๋ฐ˜์œผ๋กœ ๋Œ€์ฒด): {e}") + logger.error(f"MeCab ์‚ฌ์šฉ ๋ถˆ๊ฐ€ (๊ทœ์น™ ๊ธฐ๋ฐ˜์œผ๋กœ ๋Œ€์ฒด): {e}") else: - print("MeCab ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ๊ฐ€ ์„ค์น˜๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ๊ทœ์น™ ๊ธฐ๋ฐ˜์œผ๋กœ ๋Œ€์ฒดํ•ฉ๋‹ˆ๋‹ค.") + logger.warning( + "MeCab ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ๊ฐ€ ์„ค์น˜๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. ๊ทœ์น™ ๊ธฐ๋ฐ˜์œผ๋กœ ๋Œ€์ฒดํ•ฉ๋‹ˆ๋‹ค." + ) def analyze_keyword_match(self, title: str, keyword: str) -> dict: """ํ‚ค์›Œ๋“œ ๋งค์นญ ๋ถ„์„ ๊ฒฐ๊ณผ ๋ฐ˜ํ™˜""" title_lower = title.lower().strip() keyword_lower = keyword.lower().strip() + logger.debug( + f"ํ‚ค์›Œ๋“œ ๋งค์นญ ๋ถ„์„ ์‹œ์ž‘: title='{title[:50]}', keyword='{keyword}'" + ) + # 1. ์™„์ „ ํฌํ•จ ๊ฒ€์‚ฌ exact_match = keyword_lower in title_lower if exact_match: + logger.info( + f"์™„์ „ ํฌํ•จ ๋งค์นญ ์„ฑ๊ณต: keyword='{keyword}' in title='{title[:50]}'" + ) return { - 'is_match': True, - 'match_type': 'exact', - 'score': 1.0, - 'reason': f"์™„์ „ ํฌํ•จ: '{keyword}' in '{title[:50]}'" + "is_match": True, + "match_type": "exact", + "score": 1.0, + "reason": f"์™„์ „ ํฌํ•จ: '{keyword}' in '{title[:50]}'", } # 2. ํ˜•ํƒœ์†Œ ๋ถ„์„ (MeCab ์‚ฌ์šฉ) if self.konlpy_available: morphological_result = self._morphological_match(title_lower, keyword_lower) - if morphological_result['is_match']: + if morphological_result["is_match"]: + logger.info(f"ํ˜•ํƒœ์†Œ ๋ถ„์„ ๋งค์นญ ์„ฑ๊ณต: {morphological_result['reason']}") return morphological_result # 3. 
๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋ถ„์„ (MeCab ์‹คํŒจ์‹œ) simple_result = self._simple_keyword_match(title_lower, keyword_lower) + if simple_result["is_match"]: + logger.info(f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋งค์นญ ์„ฑ๊ณต: {simple_result['reason']}") + else: + logger.debug(f"๋งค์นญ ์‹คํŒจ: {simple_result['reason']}") + return simple_result def _morphological_match(self, title: str, keyword: str) -> dict: """ํ˜•ํƒœ์†Œ ๋ถ„์„ ๊ธฐ๋ฐ˜ ๋งค์นญ""" try: + logger.debug(f"ํ˜•ํƒœ์†Œ ๋ถ„์„ ์‹œ์ž‘: title='{title[:30]}', keyword='{keyword}'") + # ํ‚ค์›Œ๋“œ ํ˜•ํƒœ์†Œ ๋ถ„์„ keyword_result = self.mecab.parse(keyword) keyword_morphs = [] - for line in keyword_result.split('\n'): - if line == 'EOS' or line == '': + for line in keyword_result.split("\n"): + if line == "EOS" or line == "": continue - parts = line.split('\t') + parts = line.split("\t") if len(parts) >= 1: morph = parts[0].strip() if len(morph) >= 1: @@ -81,15 +103,19 @@ def _morphological_match(self, title: str, keyword: str) -> dict: # ์ œ๋ชฉ ํ˜•ํƒœ์†Œ ๋ถ„์„ title_result = self.mecab.parse(title) title_morphs = [] - for line in title_result.split('\n'): - if line == 'EOS' or line == '': + for line in title_result.split("\n"): + if line == "EOS" or line == "": continue - parts = line.split('\t') + parts = line.split("\t") if len(parts) >= 1: morph = parts[0].strip() if len(morph) >= 1: title_morphs.append(morph) + logger.debug( + f"ํ˜•ํƒœ์†Œ ์ถ”์ถœ ์™„๋ฃŒ: keyword_morphs={keyword_morphs}, title_morphs={title_morphs}" + ) + # ํ˜•ํƒœ์†Œ ๋งค์นญ matched = 0 for kw in keyword_morphs: @@ -97,52 +123,75 @@ def _morphological_match(self, title: str, keyword: str) -> dict: for tw in title_morphs: if kw == tw or kw in tw or tw in kw: matched += 1 + logger.debug(f"ํ˜•ํƒœ์†Œ ๋งค์นญ: '{kw}' <-> '{tw}'") break match_ratio = matched / len(keyword_morphs) if keyword_morphs else 0 threshold = 0.4 + logger.debug( + f"ํ˜•ํƒœ์†Œ ๋งค์นญ ๊ฒฐ๊ณผ: matched={matched}, total={len(keyword_morphs)}, ratio={match_ratio:.3f}, threshold={threshold}" + ) + if match_ratio >= threshold: return { - 'is_match': True, - 'match_type': 'morphological', - 'score': match_ratio, - 'reason': f"ํ˜•ํƒœ์†Œ ๋งค์นญ: {matched}/{len(keyword_morphs)} = {match_ratio:.3f}" + "is_match": True, + "match_type": "morphological", + "score": match_ratio, + "reason": f"ํ˜•ํƒœ์†Œ ๋งค์นญ: {matched}/{len(keyword_morphs)} = {match_ratio:.3f}", } except Exception as e: - print(f"ํ˜•ํƒœ์†Œ ๋ถ„์„ ์˜ค๋ฅ˜: {e}") + logger.error( + f"ํ˜•ํƒœ์†Œ ๋ถ„์„ ์˜ค๋ฅ˜: keyword='{keyword}', title='{title[:30]}', error='{e}'" + ) - return {'is_match': False, 'match_type': 'morphological', 'score': 0.0, 'reason': 'ํ˜•ํƒœ์†Œ ๋ถ„์„ ์‹คํŒจ'} + return { + "is_match": False, + "match_type": "morphological", + "score": 0.0, + "reason": "ํ˜•ํƒœ์†Œ ๋ถ„์„ ์‹คํŒจ", + } def _simple_keyword_match(self, title: str, keyword: str) -> dict: """๊ฐ„๋‹จํ•œ ํ‚ค์›Œ๋“œ ๋งค์นญ""" + logger.debug(f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋งค์นญ ์‹œ์ž‘: title='{title[:30]}', keyword='{keyword}'") + # ๊ณต๋ฐฑ์œผ๋กœ ๋ถ„๋ฆฌ title_words = title.split() keyword_words = keyword.split() + logger.debug( + f"๋‹จ์–ด ๋ถ„๋ฆฌ ์™„๋ฃŒ: title_words={title_words}, keyword_words={keyword_words}" + ) + matched = 0 for kw in keyword_words: if len(kw) >= 2: for tw in title_words: if kw in tw or tw in kw: matched += 1 + logger.debug(f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋งค์นญ: '{kw}' <-> '{tw}'") break match_ratio = matched / len(keyword_words) if keyword_words else 0 threshold = 0.3 + logger.debug( + f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋งค์นญ ๊ฒฐ๊ณผ: matched={matched}, total={len(keyword_words)}, ratio={match_ratio:.3f}, threshold={threshold}" + ) + if match_ratio >= 
threshold: return { - 'is_match': True, - 'match_type': 'simple', - 'score': match_ratio, - 'reason': f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋งค์นญ: {matched}/{len(keyword_words)} = {match_ratio:.3f}" + "is_match": True, + "match_type": "simple", + "score": match_ratio, + "reason": f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋งค์นญ: {matched}/{len(keyword_words)} = {match_ratio:.3f}", } return { - 'is_match': False, - 'match_type': 'simple', - 'score': match_ratio, - 'reason': f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋ฏธ๋‹ฌ: {matched}/{len(keyword_words)} = {match_ratio:.3f} < {threshold}" - } \ No newline at end of file + "is_match": False, + "match_type": "simple", + "score": match_ratio, + "reason": f"๊ทœ์น™ ๊ธฐ๋ฐ˜ ๋ฏธ๋‹ฌ: {matched}/{len(keyword_words)} = {match_ratio:.3f} < {threshold}", + } diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py index d155ee2e..f1c3104e 100644 --- a/apps/pre-processing-service/app/utils/similarity_analyzer.py +++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py @@ -2,6 +2,7 @@ import numpy as np from sklearn.metrics.pairwise import cosine_similarity from transformers import AutoTokenizer, AutoModel +from loguru import logger class SimilarityAnalyzer: @@ -9,57 +10,101 @@ class SimilarityAnalyzer: def __init__(self): try: - self.tokenizer = AutoTokenizer.from_pretrained('klue/bert-base') - self.model = AutoModel.from_pretrained('klue/bert-base') - print("KLUE BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์„ฑ๊ณต") + logger.info("KLUE BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์‹œ๋„ ์ค‘...") + self.tokenizer = AutoTokenizer.from_pretrained("klue/bert-base") + self.model = AutoModel.from_pretrained("klue/bert-base") + logger.success("KLUE BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์„ฑ๊ณต") except Exception as e: - print(f"KLUE BERT ๋กœ๋”ฉ ์‹คํŒจ, ๋‹ค๊ตญ์–ด BERT๋กœ ๋Œ€์ฒด: {e}") + logger.warning(f"KLUE BERT ๋กœ๋”ฉ ์‹คํŒจ, ๋‹ค๊ตญ์–ด BERT๋กœ ๋Œ€์ฒด: {e}") try: - self.tokenizer = AutoTokenizer.from_pretrained('bert-base-multilingual-cased') - self.model = AutoModel.from_pretrained('bert-base-multilingual-cased') - print("๋‹ค๊ตญ์–ด BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์„ฑ๊ณต") + logger.info("๋‹ค๊ตญ์–ด BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์‹œ๋„ ์ค‘...") + self.tokenizer = AutoTokenizer.from_pretrained( + "bert-base-multilingual-cased" + ) + self.model = AutoModel.from_pretrained("bert-base-multilingual-cased") + logger.success("๋‹ค๊ตญ์–ด BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์„ฑ๊ณต") except Exception as e2: - print(f"๋ชจ๋“  BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์‹คํŒจ: {e2}") + logger.error(f"๋ชจ๋“  BERT ๋ชจ๋ธ ๋กœ๋”ฉ ์‹คํŒจ: {e2}") raise e2 def get_embedding(self, text: str) -> np.ndarray: """ํ…์ŠคํŠธ ์ž„๋ฒ ๋”ฉ ์ƒ์„ฑ""" - inputs = self.tokenizer(text, return_tensors='pt', padding=True, truncation=True, max_length=128) - with torch.no_grad(): - outputs = self.model(**inputs) - return outputs.last_hidden_state[:, 0, :].numpy() + try: + logger.debug(f"์ž„๋ฒ ๋”ฉ ์ƒ์„ฑ ์‹œ์ž‘: text='{text[:50]}'") + inputs = self.tokenizer( + text, return_tensors="pt", padding=True, truncation=True, max_length=128 + ) + with torch.no_grad(): + outputs = self.model(**inputs) + embedding = outputs.last_hidden_state[:, 0, :].numpy() + logger.debug(f"์ž„๋ฒ ๋”ฉ ์ƒ์„ฑ ์™„๋ฃŒ: shape={embedding.shape}") + return embedding + except Exception as e: + logger.error(f"์ž„๋ฒ ๋”ฉ ์ƒ์„ฑ ์˜ค๋ฅ˜: text='{text[:30]}', error='{e}'") + raise def calculate_similarity(self, text1: str, text2: str) -> float: """๋‘ ํ…์ŠคํŠธ ๊ฐ„ ์œ ์‚ฌ๋„ ๊ณ„์‚ฐ""" - embedding1 = self.get_embedding(text1) - embedding2 = self.get_embedding(text2) - return cosine_similarity(embedding1, embedding2)[0][0] + try: + logger.debug( + f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ 
์‹œ์ž‘: text1='{text1[:30]}', text2='{text2[:30]}'" + ) + embedding1 = self.get_embedding(text1) + embedding2 = self.get_embedding(text2) + similarity = cosine_similarity(embedding1, embedding2)[0][0] + logger.debug(f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ ์™„๋ฃŒ: similarity={similarity:.4f}") + return similarity + except Exception as e: + logger.error( + f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ ์˜ค๋ฅ˜: text1='{text1[:30]}', text2='{text2[:30]}', error='{e}'" + ) + raise - def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> list[dict]: + def analyze_similarity_batch( + self, keyword: str, product_titles: list[str] + ) -> list[dict]: """๋ฐฐ์น˜๋กœ ์œ ์‚ฌ๋„ ๋ถ„์„""" - keyword_embedding = self.get_embedding(keyword) - results = [] + logger.info( + f"๋ฐฐ์น˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ์‹œ์ž‘: keyword='{keyword}', titles_count={len(product_titles)}" + ) - for i, title in enumerate(product_titles): - try: - title_embedding = self.get_embedding(title) - similarity = cosine_similarity(keyword_embedding, title_embedding)[0][0] + try: + keyword_embedding = self.get_embedding(keyword) + results = [] - results.append({ - 'index': i, - 'title': title, - 'similarity': float(similarity), - 'score': float(similarity) - }) - except Exception as e: - print(f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ ์˜ค๋ฅ˜ (์ œ๋ชฉ: {title[:30]}): {e}") - results.append({ - 'index': i, - 'title': title, - 'similarity': 0.0, - 'score': 0.0 - }) + for i, title in enumerate(product_titles): + try: + logger.debug( + f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ ์ค‘ ({i + 1}/{len(product_titles)}): title='{title[:30]}'" + ) + title_embedding = self.get_embedding(title) + similarity = cosine_similarity(keyword_embedding, title_embedding)[ + 0 + ][0] - # ์œ ์‚ฌ๋„ ๊ธฐ์ค€ ๋‚ด๋ฆผ์ฐจ์ˆœ ์ •๋ ฌ - results.sort(key=lambda x: x['similarity'], reverse=True) - return results \ No newline at end of file + results.append( + { + "index": i, + "title": title, + "similarity": float(similarity), + "score": float(similarity), + } + ) + logger.debug( + f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ ์™„๋ฃŒ ({i + 1}/{len(product_titles)}): similarity={similarity:.4f}" + ) + except Exception as e: + logger.error(f"์œ ์‚ฌ๋„ ๊ณ„์‚ฐ ์˜ค๋ฅ˜ (์ œ๋ชฉ: {title[:30]}): {e}") + results.append( + {"index": i, "title": title, "similarity": 0.0, "score": 0.0} + ) + + # ์œ ์‚ฌ๋„ ๊ธฐ์ค€ ๋‚ด๋ฆผ์ฐจ์ˆœ ์ •๋ ฌ + results.sort(key=lambda x: x["similarity"], reverse=True) + logger.info( + f"๋ฐฐ์น˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ์™„๋ฃŒ: ์ด {len(results)}๊ฐœ, ์ตœ๊ณ  ์œ ์‚ฌ๋„={results[0]['similarity']:.4f}" + ) + return results + except Exception as e: + logger.error(f"๋ฐฐ์น˜ ์œ ์‚ฌ๋„ ๋ถ„์„ ์‹คํŒจ: keyword='{keyword}', error='{e}'") + raise diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 30f79248..2a535f3d 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] name = "annotated-types" @@ -139,6 +139,51 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = 
["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "bs4" version = "0.0.2" @@ -154,6 +199,18 @@ files = [ [package.dependencies] beautifulsoup4 = "*" +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + [[package]] name = "certifi" version = "2025.8.3" @@ -168,84 +225,101 @@ files = [ [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] markers = "os_name == \"nt\" and implementation_name != \"pypy\"" files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = 
"cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = 
"cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, ] [package.dependencies] -pycparser = "*" +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} [[package]] name = "charset-normalizer" @@ -342,7 +416,7 @@ version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, @@ -357,7 +431,7 @@ 
version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main"] +groups = ["main", "dev"] markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, @@ -365,18 +439,21 @@ files = [ ] [[package]] -name = "dotenv" -version = "0.9.9" -description = "Deprecated package" +name = "dbutils" +version = "3.1.2" +description = "Database connections for multi-threaded environments." optional = false -python-versions = "*" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "dotenv-0.9.9-py2.py3-none-any.whl", hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9"}, + {file = "dbutils-3.1.2-py3-none-any.whl", hash = "sha256:0cb388a89eeecf04089aef113a7007c3fac9199e9580c8549829f954870c403a"}, + {file = "dbutils-3.1.2.tar.gz", hash = "sha256:160b5788154f1adeddc61080daff1530b4df2ba0d45af1c3bfbac76db24186b3"}, ] -[package.dependencies] -python-dotenv = "*" +[package.extras] +docs = ["docutils"] +pg = ["PyGreSQL (>=5)"] +tests = ["pytest (>=7)", "ruff"] [[package]] name = "fastapi" @@ -452,6 +529,217 @@ test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""] tqdm = ["tqdm"] +[[package]] +name = "google" +version = "3.0.0" +description = "Python bindings to the Google search engine." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"}, + {file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "google-api-core" +version = "2.25.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7"}, + {file = "google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +proto-plus = [ + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, + {version = ">=1.22.3,<2.0.0", markers = "python_version < \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] +grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status (>=1.49.1,<2.0.0) ; python_version >= \"3.11\""] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] + +[[package]] +name = "google-api-python-client" +version = "2.181.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_python_client-2.181.0-py3-none-any.whl", hash = "sha256:348730e3ece46434a01415f3d516d7a0885c8e624ce799f50f2d4d86c2475fb7"}, + {file = "google_api_python_client-2.181.0.tar.gz", hash = "sha256:d7060962a274a16a2c6f8fb4b1569324dbff11bfbca8eb050b88ead1dd32261c"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.0.0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-auth" +version = "2.40.3" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca"}, + {file = "google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", 
"grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2"}, + {file = "google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684"}, +] + +[package.dependencies] +google-auth = ">=2.15.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + +[[package]] +name = "greenlet" +version = "3.2.4" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +files = [ + {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, + 
{file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, + {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, + {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, + {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, + {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, + {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, + {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, + {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil", "setuptools"] + [[package]] name = "gunicorn" version = "23.0.0" @@ -530,6 +818,21 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] +[[package]] +name = "httplib2" +version = "0.30.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "httplib2-0.30.0-py3-none-any.whl", hash = "sha256:d10443a2bdfe0ea5dbb17e016726146d48b574208dafd41e854cf34e7d78842c"}, + {file = "httplib2-0.30.0.tar.gz", hash = "sha256:d5b23c11fcf8e57e00ff91b7008656af0f6242c8886fd97065c97509e4e548c5"}, +] + +[package.dependencies] +pyparsing = ">=3.0.4,<4" + [[package]] name = "httpx" version = "0.28.1" @@ -615,30 +918,12 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] -[[package]] -name = "jinja2" -version = "3.1.6" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, - {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - [[package]] name = "joblib" version = "1.5.2" @@ -670,77 +955,6 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] -[[package]] -name = "markupsafe" -version = "3.0.2" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, -] - [[package]] name = "mecab-python3" version = "1.0.10" @@ -795,379 +1009,239 @@ unidic = ["unidic"] unidic-lite = ["unidic-lite"] [[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "networkx" -version = "3.5" -description = "Python package for creating and manipulating graphs and networks" +name = "mypy-extensions" 
+version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.11" -groups = ["main"] +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, - {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[package.extras] -default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] -developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] -extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] -test-extras = ["pytest-mpl", "pytest-randomly"] - [[package]] name = "numpy" -version = "2.3.2" +version = "2.3.3" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.11" groups = ["main"] files = [ - {file = "numpy-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9"}, - {file = "numpy-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168"}, - {file = "numpy-2.3.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b"}, - {file = "numpy-2.3.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8"}, - {file = "numpy-2.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d"}, - {file = "numpy-2.3.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3"}, - {file = "numpy-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f"}, - {file = "numpy-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097"}, - {file = "numpy-2.3.2-cp311-cp311-win32.whl", hash = "sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220"}, - {file = "numpy-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170"}, - {file = "numpy-2.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f"}, - {file = 
"numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b"}, - {file = "numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370"}, - {file = "numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73"}, - {file = "numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc"}, - {file = "numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be"}, - {file = "numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036"}, - {file = "numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f"}, - {file = "numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089"}, - {file = "numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2"}, - {file = "numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f"}, - {file = "numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee"}, - {file = "numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6"}, - {file = "numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b"}, - {file = "numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56"}, - {file = "numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286"}, - {file = 
"numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8"}, - {file = "numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a"}, - {file = "numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91"}, - {file = "numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5"}, - {file = "numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5"}, - {file = "numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450"}, - {file = "numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19"}, - {file = "numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f"}, - {file = "numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5"}, - {file = "numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58"}, - {file = "numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0"}, - {file = "numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2"}, - {file = "numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b"}, - {file = "numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2"}, - {file = "numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0"}, - {file = "numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0"}, - {file = 
"numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2"}, - {file = "numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf"}, - {file = "numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1"}, - {file = "numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b"}, - {file = "numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619"}, - {file = "numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.8.4.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0"}, - {file = "nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142"}, - {file = "nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.8.90" -description = "CUDA profiling tools runtime libs." 
-optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4412396548808ddfed3f17a467b104ba7751e6b58678a4b840675c56d21cf7ed"}, - {file = "nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182"}, - {file = "nvidia_cuda_cupti_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:bb479dcdf7e6d4f8b0b01b115260399bf34154a1a2e9fe11c85c517d87efd98e"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.8.93" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994"}, - {file = "nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc1fec1e1637854b4c0a65fb9a8346b51dd9ee69e61ebaccc82058441f15bce8"}, - {file = "nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:7a4b6b2904850fe78e0bd179c4b655c404d4bb799ef03ddc60804247099ae909"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.8.90" -description = "CUDA Runtime native Libraries" + {file = "numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125"}, + {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48"}, + {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6"}, + {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa"}, + {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30"}, + {file = "numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57"}, + {file = "numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa"}, + {file = "numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = 
"sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b"}, + {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8"}, + {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20"}, + {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea"}, + {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7"}, + {file = "numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf"}, + {file = "numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb"}, + {file = "numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7"}, + {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c"}, + {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93"}, + {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae"}, + {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86"}, + {file = "numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8"}, + {file = "numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf"}, + {file = "numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19"}, + {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30"}, + {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e"}, + {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3"}, + {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea"}, + {file = "numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd"}, + {file = "numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d"}, + {file = "numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a"}, + {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe"}, + {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421"}, + {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021"}, + {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf"}, + {file = "numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0"}, + {file = "numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8"}, + {file = "numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a"}, + {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54"}, + {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e"}, + {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097"}, + {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970"}, + {file = "numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5"}, + {file = "numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f"}, + {file = "numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc"}, + {file = "numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029"}, +] + +[[package]] +name = "oauthlib" +version = "3.3.1" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d"}, - {file = "nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90"}, - {file = "nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "9.10.2.21" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" +python-versions = ">=3.8" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" files = [ - {file = "nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8"}, - {file = "nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8"}, - {file = "nvidia_cudnn_cu12-9.10.2.21-py3-none-win_amd64.whl", hash = "sha256:c6288de7d63e6cf62988f0923f96dc339cea362decb1bf5b3141883392a7d65e"}, + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = 
"oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, ] -[package.dependencies] -nvidia-cublas-cu12 = "*" +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] -name = "nvidia-cufft-cu12" -version = "11.3.3.83" -description = "CUFFT native runtime libraries" +name = "outcome" +version = "1.3.0.post0" +description = "Capture the outcome of Python function calls." optional = false -python-versions = ">=3" +python-versions = ">=3.7" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" files = [ - {file = "nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a"}, - {file = "nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74"}, - {file = "nvidia_cufft_cu12-11.3.3.83-py3-none-win_amd64.whl", hash = "sha256:7a64a98ef2a7c47f905aaf8931b69a3a43f27c55530c698bb2ed7c75c0b42cb7"}, + {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, + {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, ] [package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cufile-cu12" -version = "1.13.1.3" -description = "cuFile GPUDirect libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc"}, - {file = "nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:4beb6d4cce47c1a0f1013d72e02b0994730359e17801d395bdcbf20cfb3bb00a"}, -] +attrs = ">=19.2.0" [[package]] -name = "nvidia-curand-cu12" -version = "10.3.9.90" -description = "CURAND native runtime libraries" +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:dfab99248034673b779bc6decafdc3404a8a6f502462201f2f31f11354204acd"}, - {file = "nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9"}, - {file = "nvidia_curand_cu12-10.3.9.90-py3-none-win_amd64.whl", hash = "sha256:f149a8ca457277da854f89cf282d6ef43176861926c7ac85b2a0fbd237c587ec"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] -name = "nvidia-cusolver-cu12" -version = "11.7.3.90" -description = "CUDA solver native runtime libraries" +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0"}, - {file = "nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450"}, - {file = "nvidia_cusolver_cu12-11.7.3.90-py3-none-win_amd64.whl", hash = "sha256:4a550db115fcabc4d495eb7d39ac8b58d4ab5d8e63274d3754df1c0ad6a22d34"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[package.dependencies] -nvidia-cublas-cu12 = "*" -nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - [[package]] -name = "nvidia-cusparse-cu12" -version = "12.5.8.93" -description = "CUSPARSE native runtime libraries" +name = "platformdirs" +version = "4.4.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc"}, - {file = "nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b"}, - {file = "nvidia_cusparse_cu12-12.5.8.93-py3-none-win_amd64.whl", hash = "sha256:9a33604331cb2cac199f2e7f5104dfbb8a5a898c367a53dfda9ff2acb6b6b4dd"}, + {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, + {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, ] -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparselt-cu12" -version = "0.7.1" -description = "NVIDIA cuSPARSELt" -optional = false -python-versions = "*" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8878dce784d0fac90131b6817b607e803c36e629ba34dc5b433471382196b6a5"}, - {file = "nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623"}, - {file = "nvidia_cusparselt_cu12-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f67fbb5831940ec829c9117b7f33807db9f9678dc2a617fbe781cac17b4e1075"}, -] +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] [[package]] -name = "nvidia-nccl-cu12" -version = "2.27.3" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" optional = 
false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ddf1a245abc36c550870f26d537a9b6087fb2e2e3d6e0ef03374c6fd19d984f"}, - {file = "nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adf27ccf4238253e0b826bce3ff5fa532d65fc42322c8bfdfaf28024c0fbe039"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.8.93" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88"}, - {file = "nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:adccd7161ace7261e01bb91e44e88da350895c270d23f744f0820c818b7229e7"}, - {file = "nvidia_nvjitlink_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:bd93fbeeee850917903583587f4fc3a4eafa022e34572251368238ab5e6bd67f"}, -] +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] -name = "nvidia-nvtx-cu12" -version = "12.8.90" -description = "NVIDIA Tools Extension" +name = "poetry-core" +version = "2.1.3" +description = "Poetry PEP 517 Build Backend" optional = false -python-versions = ">=3" +python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" files = [ - {file = "nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d7ad891da111ebafbf7e015d34879f7112832fc239ff0d7d776b6cb685274615"}, - {file = "nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f"}, - {file = "nvidia_nvtx_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:619c8304aedc69f02ea82dd244541a83c3d9d40993381b3b590f1adaed3db41e"}, + {file = "poetry_core-2.1.3-py3-none-any.whl", hash = "sha256:2c704f05016698a54ca1d327f46ce2426d72eaca6ff614132c8477c292266771"}, + {file = "poetry_core-2.1.3.tar.gz", hash = "sha256:0522a015477ed622c89aad56a477a57813cace0c8e7ff2a2906b7ef4a2e296a4"}, ] [[package]] -name = "outcome" -version = "1.3.0.post0" -description = "Capture the outcome of Python function calls." 
+name = "proto-plus" +version = "1.26.1" +description = "Beautiful, Pythonic protocol buffers" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, - {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, + {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, + {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, ] [package.dependencies] -attrs = ">=19.2.0" +protobuf = ">=3.19.0,<7.0.0" -[[package]] -name = "packaging" -version = "25.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, -] +[package.extras] +testing = ["google-api-core (>=1.31.5)"] [[package]] -name = "pluggy" -version = "1.6.0" -description = "plugin and hook calling mechanisms for python" +name = "protobuf" +version = "6.32.0" +description = "" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, - {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, + {file = "protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741"}, + {file = "protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e"}, + {file = "protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0"}, + {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1"}, + {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c"}, + {file = "protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb"}, + {file = "protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3"}, + {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"}, + {file = "protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"}, ] -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["coverage", "pytest", "pytest-benchmark"] - [[package]] name = "psycopg2-binary" version = "2.9.10" @@ -1246,17 +1320,44 @@ files = [ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = 
"sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + [[package]] name = "pycparser" -version = "2.22" +version = "2.23" description = "C parser in Python" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "os_name == \"nt\" and implementation_name != \"pypy\"" +markers = "os_name == \"nt\" and implementation_name != \"pypy\" and implementation_name != \"PyPy\"" files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] [[package]] @@ -1423,7 +1524,7 @@ version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["dev"] files = [ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, @@ -1432,6 +1533,48 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pymysql" +version = "1.1.2" +description = "Pure Python MySQL Driver" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9"}, + {file = "pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03"}, +] + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyperclip" +version = "1.9.0" +description = "A cross-platform clipboard module for Python. 
(Only handles plain text for now.)" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"}, +] + [[package]] name = "pysocks" version = "1.7.1" @@ -1447,14 +1590,14 @@ files = [ [[package]] name = "pytest" -version = "8.4.1" +version = "8.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["dev"] files = [ - {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, - {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, ] [package.dependencies] @@ -1482,77 +1625,6 @@ files = [ [package.extras] cli = ["click (>=5.0)"] -[[package]] -name = "python-mecab-ko" -version = "1.3.7" -description = "A python binding for mecab-ko" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "python_mecab_ko-1.3.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4760efe6327b5707f55db2b4a6f8fb047fe8e068577a9a913304bb0d12e7de44"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:27a03ae50aabc7f057c26ad5e4c6c4d431cf696778e45025e208d2f6b7bf115d"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8d2539e7ea91eb0705381f75e64c626be4eba69824a8c82fbdf2c4e48a1d389"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2bad59670b280548b9060c1b511f6f088c09b977355de7192e9d0044b8f724b"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c4347f075b8748cbc5695f6b91120b0e388344eab5d9c26d50ad3c57c35754"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-win32.whl", hash = "sha256:682875cd1cafeeb2946b856b1b479144b4e8d28363b6bff3ae1c8b294994742b"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-win_amd64.whl", hash = "sha256:ef5a6bb8d4611dd621436492adb140c280fe4e155097c5dcc8b1fcdd203abfb6"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-win_arm64.whl", hash = "sha256:14b070b886d864964710c6a396556d8509be2dce1618f401192fd7c213eb4608"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da1cc9de07e75beb2d4067c1c072ecabdb293440633fc0e32f2875a14e703829"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:523153de14262c413838852742541d48ad99d41ab8f6c5413a226319ee4c25ef"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:010ca2297e63d08a772466dd401d36ed9914502b8794c08948427a4083b3202c"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7b35116e98fb736f7c9550eb1a74cfb6aa35c39b0b43cbe7a8837bfa3cd39d4"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0933d3fcb84f6ed36cce49f1939604ac0fcaf4460441e832cb98ca1bdce74a37"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-win32.whl", hash = "sha256:644207821de8c76ff2442d84c8902dd16b239fdc80c79d0774f8b9ea446c4218"}, - {file = 
"python_mecab_ko-1.3.7-cp311-cp311-win_amd64.whl", hash = "sha256:a456e40817dc73f58d7f11ff01af4394cdd1ceab2e98feddde625587603d65f7"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-win_arm64.whl", hash = "sha256:9f5e40101426b87c99ecb1268f56402f9c44f9d06271b28ccc1ec1bc6bc582ac"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7721f69381dac572a1598e5906cc5faba233ed48bc6ff8672082a519d7db0ba1"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:eae5eb6178b06019e3773e9dde126dd29df5ed417406be5611ebdd0f8839c1e1"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e90e8c1009f8f6aa0dfc43c916ff481dc79aa5a7e528a41a193add9c61ac6d1"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a205ca4da908df39d6d70f968426d0e9dc79274a6d34b13a5588ab52f0e12be8"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0e98a7d94278f4f5d93f03e35cc8044460c0076ab4698b764d5c44bd897dbe"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-win32.whl", hash = "sha256:3145c53772e842a046fdbf0659f0e5235e16d51b0bb8c0d3e8e078dc57d22373"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-win_amd64.whl", hash = "sha256:3387906e66109989603b877899d1ae3a0132795c9c73ad91a5e7c4c077177351"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-win_arm64.whl", hash = "sha256:13126509630e47fc89a8c575f5af3eed1bc09370e978b331caf32325e6b98383"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198c0b9a832966927ceceda599b8d2f38426d11d25defa0d4ed819e3d00bfa91"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:661da586a6783cd60dc93ebb4dcc182e5cb3d37b98d25fe741c8eb2aabd59b30"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eab31739769b1ad90fcd81f7e2319f2bc33f7b85aee3a5cec230352963678ac0"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10ea3c549eac11cdf9e994ce65fb34653a142d04eaa519c2ba3a99646cb21991"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-win32.whl", hash = "sha256:ec22b9f8b7d5ec62d2af48d252f0172e1c4dfdf1387bad356f62b73084bac675"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e0fb84a0eda5f77dbb456fb7eba9715349668b2a9bb4235df0904620653eabda"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ad754804a5a5b64b62d77a962d33ef6e931765cede89f880e02e3d18971a5bd"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64346e4a627ad3b56647f2d6909ba52bd25b5b29f8d320944ed9dce602ba0b75"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0b50438fb570299bd7e4c30549373c171b94f6400c32b0b455b37047e5ed7ed"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4321180be1e5446bb97e8f803079deb72500af7bbb7d0e2c49ec9995ec3674f5"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c297e6e5a8a0aacd75e9efee465d0bf7f6d1b9f0ccb9b18916e9203ea0e349"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-win32.whl", hash = "sha256:8015778e03186f8d2e7b0f1c0c9b753617d848cea2c4eba09e59e081080da92a"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-win_amd64.whl", hash = 
"sha256:782bf38e817ad54ca16dccd2e4edf083829e259aac1da3187ccc1fd305dfb503"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4fdae16e907470cec155721cc0f849a9d52e01eae316aae53101fa236069505b"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:691bed2317e4cbbf4f00fc11a59d6d95412b72b9bd6eea037880df95fcd7e6a0"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d147ce60440cd04e3e113508f1c7f04ed39bcbb7991921d9c66b060709af253e"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99f02fb9816dda3258726b33423f0b48429582d4386529c08caa01c0d4e8365b"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96c719105eae24c24882fbea821df7a26c961590d06ff932599690785d7efe5"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-win32.whl", hash = "sha256:288ff89e4d1318923acecccfbb0b9d4937a8f93ac27e4868e08c778629d0522a"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-win_amd64.whl", hash = "sha256:12c4b86041350024355d51dd16cb989fd027e142c8083d3b12d21b9262522054"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-win_arm64.whl", hash = "sha256:2a84df563961a6507e170f78b010716a69874fc4b00ce503280f5eb7d62ccd1c"}, - {file = "python_mecab_ko-1.3.7.tar.gz", hash = "sha256:69cbb2ac559a3169c22b1a3aa5d3c247d2f7902d9fe7dc9966189a9c7694af0b"}, -] - -[package.dependencies] -python-mecab-ko-dic = "*" - -[[package]] -name = "python-mecab-ko-dic" -version = "2.1.1.post2" -description = "mecab-ko-dic packaged for Python" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "python-mecab-ko-dic-2.1.1.post2.tar.gz", hash = "sha256:2c423713bdc475345ec98cd084b30759458f8f06c38a9ef94ab8687942c2cd34"}, - {file = "python_mecab_ko_dic-2.1.1.post2-py3-none-any.whl", hash = "sha256:ef8f4e80c8976f1340a7264abb0c96f384fe059fd897584aeba0151753c6ae9b"}, -] - [[package]] name = "pyyaml" version = "6.0.2" @@ -1735,6 +1807,40 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "safetensors" version = "0.6.2" @@ -1776,38 +1882,43 @@ torch = ["safetensors[numpy]", "torch (>=1.10)"] [[package]] name = "scikit-learn" -version = "1.7.1" +version = "1.7.2" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "scikit_learn-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:406204dd4004f0517f0b23cf4b28c6245cbd51ab1b6b78153bc784def214946d"}, - {file = "scikit_learn-1.7.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:16af2e44164f05d04337fd1fc3ae7c4ea61fd9b0d527e22665346336920fe0e1"}, - {file = "scikit_learn-1.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2f2e78e56a40c7587dea9a28dc4a49500fa2ead366869418c66f0fd75b80885c"}, - {file = "scikit_learn-1.7.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b62b76ad408a821475b43b7bb90a9b1c9a4d8d125d505c2df0539f06d6e631b1"}, - {file = "scikit_learn-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:9963b065677a4ce295e8ccdee80a1dd62b37249e667095039adcd5bce6e90deb"}, - {file = "scikit_learn-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90c8494ea23e24c0fb371afc474618c1019dc152ce4a10e4607e62196113851b"}, - {file = "scikit_learn-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:bb870c0daf3bf3be145ec51df8ac84720d9972170786601039f024bf6d61a518"}, - {file = "scikit_learn-1.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40daccd1b5623f39e8943ab39735cadf0bdce80e67cdca2adcb5426e987320a8"}, - {file = "scikit_learn-1.7.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:30d1f413cfc0aa5a99132a554f1d80517563c34a9d3e7c118fde2d273c6fe0f7"}, - {file = "scikit_learn-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:c711d652829a1805a95d7fe96654604a8f16eab5a9e9ad87b3e60173415cb650"}, - {file = "scikit_learn-1.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3cee419b49b5bbae8796ecd690f97aa412ef1674410c23fc3257c6b8b85b8087"}, - {file = "scikit_learn-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2fd8b8d35817b0d9ebf0b576f7d5ffbbabdb55536b0655a8aaae629d7ffd2e1f"}, - {file = "scikit_learn-1.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:588410fa19a96a69763202f1d6b7b91d5d7a5d73be36e189bc6396bfb355bd87"}, - {file = "scikit_learn-1.7.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3142f0abe1ad1d1c31a2ae987621e41f6b578144a911ff4ac94781a583adad7"}, - {file = 
"scikit_learn-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3ddd9092c1bd469acab337d87930067c87eac6bd544f8d5027430983f1e1ae88"}, - {file = "scikit_learn-1.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b7839687fa46d02e01035ad775982f2470be2668e13ddd151f0f55a5bf123bae"}, - {file = "scikit_learn-1.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a10f276639195a96c86aa572ee0698ad64ee939a7b042060b98bd1930c261d10"}, - {file = "scikit_learn-1.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:13679981fdaebc10cc4c13c43344416a86fcbc61449cb3e6517e1df9d12c8309"}, - {file = "scikit_learn-1.7.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f1262883c6a63f067a980a8cdd2d2e7f2513dddcef6a9eaada6416a7a7cbe43"}, - {file = "scikit_learn-1.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca6d31fb10e04d50bfd2b50d66744729dbb512d4efd0223b864e2fdbfc4cee11"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:781674d096303cfe3d351ae6963ff7c958db61cde3421cd490e3a5a58f2a94ae"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:10679f7f125fe7ecd5fad37dd1aa2daae7e3ad8df7f3eefa08901b8254b3e12c"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f812729e38c8cb37f760dce71a9b83ccfb04f59b3dca7c6079dcdc60544fa9e"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:88e1a20131cf741b84b89567e1717f27a2ced228e0f29103426102bc2e3b8ef7"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b1bd1d919210b6a10b7554b717c9000b5485aa95a1d0f177ae0d7ee8ec750da5"}, - {file = "scikit_learn-1.7.1.tar.gz", hash = "sha256:24b3f1e976a4665aa74ee0fcaac2b8fccc6ae77c8e07ab25da3ba6d3292b9802"}, + {file = "scikit_learn-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b33579c10a3081d076ab403df4a4190da4f4432d443521674637677dc91e61f"}, + {file = "scikit_learn-1.7.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:36749fb62b3d961b1ce4fedf08fa57a1986cd409eff2d783bca5d4b9b5fce51c"}, + {file = "scikit_learn-1.7.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7a58814265dfc52b3295b1900cfb5701589d30a8bb026c7540f1e9d3499d5ec8"}, + {file = "scikit_learn-1.7.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a847fea807e278f821a0406ca01e387f97653e284ecbd9750e3ee7c90347f18"}, + {file = "scikit_learn-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:ca250e6836d10e6f402436d6463d6c0e4d8e0234cfb6a9a47835bd392b852ce5"}, + {file = "scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e"}, + {file = "scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1"}, + {file = "scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d"}, + {file = "scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1"}, + {file = "scikit_learn-1.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:9b7ed8d58725030568523e937c43e56bc01cadb478fc43c042a9aca1dacb3ba1"}, + {file = "scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96"}, + {file = "scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476"}, + {file = "scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b"}, + {file = "scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44"}, + {file = "scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290"}, + {file = "scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7"}, + {file = "scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe"}, + {file = "scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f"}, + {file = "scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0"}, + {file = "scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973"}, + {file = "scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33"}, + {file = "scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615"}, + {file = "scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106"}, + {file = "scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61"}, + {file = "scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8"}, + {file = "scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda"}, ] [package.dependencies] @@ -1918,28 +2029,6 @@ typing_extensions = ">=4.14.0,<4.15.0" urllib3 = {version = ">=2.5.0,<3.0", extras = ["socks"]} websocket-client = ">=1.8.0,<1.9.0" -[[package]] -name = "setuptools" -version = "80.9.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = 
false -python-versions = ">=3.9" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" or python_version >= \"3.12\"" -files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] - [[package]] name = "sniffio" version = "1.3.1" @@ -1977,41 +2066,119 @@ files = [ ] [[package]] -name = "starlette" -version = "0.47.2" -description = "The little ASGI library that shines." 
+name = "sqlalchemy" +version = "2.0.43" +description = "Database Abstraction Library" optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b"}, - {file = "starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07097c0a1886c150ef2adba2ff7437e84d40c0f7dcb44a2c2b9c905ccfc6361c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cdeff998cb294896a34e5b2f00e383e7c5c4ef3b4bfa375d9104723f15186443"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:bcf0724a62a5670e5718957e05c56ec2d6850267ea859f8ad2481838f889b42c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win32.whl", hash = "sha256:c697575d0e2b0a5f0433f679bda22f63873821d991e95a90e9e52aae517b2e32"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win_amd64.whl", hash = "sha256:d34c0f6dbefd2e816e8f341d0df7d4763d382e3f452423e752ffd1e213da2512"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d"}, + {file = 
"sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e6aeb2e0932f32950cf56a8b4813cb15ff792fc0c9b3752eaf067cfe298496a"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f964a05356f4bca4112e6334ed7c208174511bd56e6b8fc86dad4d024d4185"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46293c39252f93ea0910aababa8752ad628bcce3a10d3f260648dd472256983f"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:136063a68644eca9339d02e6693932116f6a8591ac013b0014479a1de664e40a"}, + {file = 
"sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6e2bf13d9256398d037fef09fd8bf9b0bf77876e22647d10761d35593b9ac547"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:44337823462291f17f994d64282a71c51d738fc9ef561bf265f1d0fd9116a782"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win32.whl", hash = "sha256:13194276e69bb2af56198fef7909d48fd34820de01d9c92711a5fa45497cc7ed"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win_amd64.whl", hash = "sha256:334f41fa28de9f9be4b78445e68530da3c5fa054c907176460c81494f4ae1f5e"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b"}, + {file = "sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc"}, + {file = "sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417"}, ] [package.dependencies] -anyio = ">=3.6.2,<5" -typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" [package.extras] -full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] 
+sqlcipher = ["sqlcipher3_binary"] [[package]] -name = "sympy" -version = "1.14.0" -description = "Computer algebra system (CAS) in Python" +name = "starlette" +version = "0.47.3" +description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, - {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, + {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"}, + {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"}, ] [package.dependencies] -mpmath = ">=1.1.0,<1.4" +anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} [package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "threadpoolctl" @@ -2058,69 +2225,6 @@ dev = ["tokenizers[testing]"] docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"] -[[package]] -name = "torch" -version = "2.8.0" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.9.0" -groups = ["main"] -files = [ - {file = "torch-2.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0be92c08b44009d4131d1ff7a8060d10bafdb7ddcb7359ef8d8c5169007ea905"}, - {file = "torch-2.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89aa9ee820bb39d4d72b794345cccef106b574508dd17dbec457949678c76011"}, - {file = "torch-2.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8e5bf982e87e2b59d932769938b698858c64cc53753894be25629bdf5cf2f46"}, - {file = "torch-2.8.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a3f16a58a9a800f589b26d47ee15aca3acf065546137fc2af039876135f4c760"}, - {file = "torch-2.8.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:220a06fd7af8b653c35d359dfe1aaf32f65aa85befa342629f716acb134b9710"}, - {file = "torch-2.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c12fa219f51a933d5f80eeb3a7a5d0cbe9168c0a14bbb4055f1979431660879b"}, - {file = "torch-2.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c7ef765e27551b2fbfc0f41bcf270e1292d9bf79f8e0724848b1682be6e80aa"}, - {file = "torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:5ae0524688fb6707c57a530c2325e13bb0090b745ba7b4a2cd6a3ce262572916"}, - {file = "torch-2.8.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e2fab4153768d433f8ed9279c8133a114a034a61e77a3a104dcdf54388838705"}, - {file = "torch-2.8.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2aca0939fb7e4d842561febbd4ffda67a8e958ff725c1c27e244e85e982173c"}, - {file = "torch-2.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:2f4ac52f0130275d7517b03a33d2493bab3693c83dcfadf4f81688ea82147d2e"}, - {file = "torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:619c2869db3ada2c0105487ba21b5008defcc472d23f8b80ed91ac4a380283b0"}, - {file = "torch-2.8.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2b2f96814e0345f5a5aed9bf9734efa913678ed19caf6dc2cddb7930672d6128"}, - {file = "torch-2.8.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:65616ca8ec6f43245e1f5f296603e33923f4c30f93d65e103d9e50c25b35150b"}, - {file = "torch-2.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:659df54119ae03e83a800addc125856effda88b016dfc54d9f65215c3975be16"}, - {file = "torch-2.8.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:1a62a1ec4b0498930e2543535cf70b1bef8c777713de7ceb84cd79115f553767"}, - {file = "torch-2.8.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:83c13411a26fac3d101fe8035a6b0476ae606deb8688e904e796a3534c197def"}, - {file = "torch-2.8.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8f0a9d617a66509ded240add3754e462430a6c1fc5589f86c17b433dd808f97a"}, - {file = "torch-2.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a7242b86f42be98ac674b88a4988643b9bc6145437ec8f048fea23f72feb5eca"}, - {file = "torch-2.8.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:7b677e17f5a3e69fdef7eb3b9da72622f8d322692930297e4ccb52fefc6c8211"}, - {file = "torch-2.8.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:da6afa31c13b669d4ba49d8a2169f0db2c3ec6bec4af898aa714f401d4c38904"}, - {file = "torch-2.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:06fcee8000e5c62a9f3e52a688b9c5abb7c6228d0e56e3452983416025c41381"}, - {file = "torch-2.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:5128fe752a355d9308e56af1ad28b15266fe2da5948660fad44de9e3a9e36e8c"}, - {file = "torch-2.8.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e9f071f5b52a9f6970dc8a919694b27a91ae9dc08898b2b988abbef5eddfd1ae"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -nvidia-cublas-cu12 = {version = "12.8.4.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.8.93", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "9.10.2.21", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.3.3.83", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufile-cu12 = {version = "1.13.1.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.9.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.7.3.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.5.8.93", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparselt-cu12 = {version = "0.7.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.27.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvjitlink-cu12 = {version = "12.8.93", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -setuptools = {version = "*", markers = "python_version >= \"3.12\""} -sympy = ">=1.13.3" -triton = {version = "3.4.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -typing-extensions = ">=4.10.0" - -[package.extras] 
-opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.13.0)"] -pyyaml = ["pyyaml"] - [[package]] name = "tqdm" version = "4.67.1" @@ -2145,14 +2249,14 @@ telegram = ["requests"] [[package]] name = "transformers" -version = "4.56.0" +version = "4.56.1" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.9.0" groups = ["main"] files = [ - {file = "transformers-4.56.0-py3-none-any.whl", hash = "sha256:bacf539c38dd850690856881c4974321af93a22f2ee96bcc994741a2121d8e71"}, - {file = "transformers-4.56.0.tar.gz", hash = "sha256:6ca9c3f38aa4da93ebf877db7156368c1c188c7465f09dbe70951e7622e987fa"}, + {file = "transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248"}, + {file = "transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74"}, ] [package.dependencies] @@ -2169,23 +2273,23 @@ tqdm = ">=4.27" [package.extras] accelerate = ["accelerate (>=0.26.0)"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av", "codecarbon (>=2.8.1)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "librosa", "mistral-common[opencv] (>=1.6.3)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision"] +all = ["Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "codecarbon (>=2.8.1)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "librosa", "mistral-common[opencv] (>=1.6.3)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] benchmark = ["optimum-benchmark (>=0.3.0)"] chat-template = ["jinja2 (>=3.1.0)"] codecarbon = ["codecarbon (>=2.8.1)"] deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "optuna", "parameterized (>=0.9)", "protobuf", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter 
(==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.22.0,<=0.23.0)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "kenlm", "kernels (>=0.6.1,<=0.9)", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", 
"beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "optuna", "parameterized (>=0.9)", "protobuf", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "onnxconverter-common", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "tf2onnx", "timeout-decorator", "tokenizers (>=0.22.0,<=0.23.0)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", 
"GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "kenlm", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] -hf-xet = ["hf-xet"] +hf-xet = ["hf_xet"] hub-kernels = ["kernels (>=0.6.1,<=0.9)"] integrations = ["kernels (>=0.6.1,<=0.9)", "optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)"] mistral-common = ["mistral-common[opencv] (>=1.6.3)"] modelcreation = ["cookiecutter (==1.7.3)"] natten = ["natten (>=0.14.6,<0.15.0)"] @@ -2204,7 +2308,7 @@ serving = ["accelerate (>=0.26.0)", "fastapi", "openai (>=1.98.0)", "pydantic (> sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "parameterized (>=0.9)", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "parameterized (>=0.9)", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score 
(!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] @@ -2214,7 +2318,7 @@ tokenizers = ["tokenizers (>=0.22.0,<=0.23.0)"] torch = ["accelerate (>=0.26.0)", "torch (>=2.2)"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.34.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "tqdm (>=4.27)"] +torchhub = ["filelock", "huggingface-hub (>=0.34.0,<1.0)", "importlib_metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "tqdm (>=4.27)"] video = ["av"] vision = ["Pillow (>=10.0.1,<=15.0)"] @@ -2255,31 +2359,6 @@ outcome = ">=1.2.0" trio = ">=0.11" wsproto = ">=0.14" -[[package]] -name = "triton" -version = "3.4.0" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "<3.14,>=3.9" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "triton-3.4.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ff2785de9bc02f500e085420273bb5cc9c9bb767584a4aa28d6e360cec70128"}, - {file = "triton-3.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b70f5e6a41e52e48cfc087436c8a28c17ff98db369447bcaff3b887a3ab4467"}, - {file = "triton-3.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c1d84a5c0ec2c0f8e8a072d7fd150cab84a9c239eaddc6706c081bfae4eb04"}, - {file = "triton-3.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00be2964616f4c619193cb0d1b29a99bd4b001d7dc333816073f92cf2a8ccdeb"}, - {file = "triton-3.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7936b18a3499ed62059414d7df563e6c163c5e16c3773678a3ee3d417865035d"}, - {file = "triton-3.4.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e5c1442eaeabae2e2452ae765801bd53cd4ce873cab0d1bdd59a32ab2d9397"}, -] - -[package.dependencies] -setuptools = ">=40.8.0" - -[package.extras] -build = ["cmake (>=3.20,<4.0)", "lit"] -tests = ["autopep8", "isort", "llnl-hatchet", "numpy", "pytest", "pytest-forked", "pytest-xdist", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] - [[package]] name = "typing-extensions" version = "4.14.1" @@ -2307,6 +2386,18 @@ files = [ [package.dependencies] typing-extensions = ">=4.12.0" +[[package]] +name = "uritemplate" +version = "4.2.0" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686"}, + 
{file = "uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e"}, +] + [[package]] name = "urllib3" version = "2.5.0" @@ -2398,4 +2489,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "30722a9f9497e4264b15e7af55b9f8eeb44781a8800f571e477fc146a340179e" +content-hash = "1c4515b957a639ee4f2aecf7e2a9b856228870ff4cdfeccd320dc5376fc3605b" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index af7d2124..34ece3ae 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "pre-processing-service" +name = "app" version = "0.1.0" description = "" authors = [ @@ -7,31 +7,46 @@ authors = [ ] readme = "README.md" requires-python = ">=3.11,<3.14" -dependencies = [ - "fastapi (>=0.116.1,<0.117.0)", - "uvicorn (>=0.35.0,<0.36.0)", - "loguru (>=0.7.3,<0.8.0)", - "pytest (>=8.4.1,<9.0.0)", - "dotenv (>=0.9.9,<0.10.0)", - "pydantic-settings (>=2.10.1,<3.0.0)", - "psycopg2-binary (>=2.9.10,<3.0.0)", - "asyncpg (>=0.30.0,<0.31.0)", - "gunicorn (>=23.0.0,<24.0.0)", - "requests (>=2.32.5,<3.0.0)", - "bs4 (>=0.0.2,<0.0.3)", - "selenium (>=4.35.0,<5.0.0)", - "transformers (>=4.56.0,<5.0.0)", - "numpy (>=2.3.2,<3.0.0)", - "torch (>=2.8.0,<3.0.0)", - "scikit-learn (>=1.7.1,<2.0.0)", - "python-dotenv (>=1.1.1,<2.0.0)", - "mecab-python3 (>=1.0.10,<2.0.0)", - "httpx (>=0.28.1,<0.29.0)", - "asyncpg (>=0.30.0,<0.31.0)", - "gunicorn (>=23.0.0,<24.0.0)", -] +[[tool.poetry.source]] +name = "pytorch" +url = "https://download.pytorch.org/whl/cpu" +priority = "explicit" + +[tool.poetry.dependencies] +python = ">=3.11,<3.14" +fastapi = ">=0.116.1,<0.117.0" +uvicorn = ">=0.35.0,<0.36.0" +loguru = ">=0.7.3,<0.8.0" +pydantic-settings = ">=2.10.1,<3.0.0" +psycopg2-binary = ">=2.9.10,<3.0.0" +asyncpg = ">=0.30.0,<0.31.0" +gunicorn = ">=23.0.0,<24.0.0" +requests = ">=2.32.5,<3.0.0" +bs4 = ">=0.0.2,<0.0.3" +selenium = ">=4.35.0,<5.0.0" +transformers = ">=4.56.0,<5.0.0" +numpy = ">=2.3.2,<3.0.0" +#torch = ">=2.8.0,<3.0.0" +scikit-learn = ">=1.7.1,<2.0.0" +python-dotenv = ">=1.1.1,<2.0.0" +mecab-python3 = ">=1.0.10,<2.0.0" +httpx = ">=0.28.1,<0.29.0" +pyperclip = ">=1.9.0,<2.0.0" +pymysql = ">=1.1.2,<2.0.0" +sqlalchemy = ">=2.0.43,<3.0.0" +google = "^3.0.0" +google-auth-oauthlib = "^1.2.2" +google-api-python-client = "^2.181.0" +poetry-core=">=2.1.3,<3.0.0" +dbutils=">=3.1.2,<4.0.0" + [build-system] requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.poetry.group.dev.dependencies] +black = "^25.1.0" +pytest = "^8.4" + diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 145af4f6..624067f6 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -3,11 +3,13 @@ plugins { id 'org.springframework.boot' version '3.5.4' id 'io.spring.dependency-management' version '1.1.7' id 'com.diffplug.spotless' version '7.2.1' + id 'org.asciidoctor.jvm.convert' version '3.3.2' + id 'com.epages.restdocs-api-spec' version '0.18.2' } -group = 'com.gltkorea' +group = 'site.icebang' version = '0.0.1-alpha-SNAPSHOT' -description = 'GLT korea - fast campus team4 ice bang' +description = 'Ice bang - fast campus team4' java { toolchain { @@ -23,6 +25,8 @@ configurations { all { exclude group: 'org.springframework.boot', module: 'spring-boot-starter-logging' } + // AsciiDoctor Extension for REST Docs + asciidoctorExt } repositories { 
@@ -74,18 +78,42 @@ dependencies { testImplementation 'org.testcontainers:mariadb' testImplementation 'com.h2database:h2' testRuntimeOnly 'org.junit.platform:junit-platform-launcher' + + // Spring REST Docs + testImplementation 'org.springframework.restdocs:spring-restdocs-mockmvc' + testImplementation 'org.springframework.restdocs:spring-restdocs-webtestclient' + asciidoctorExt 'org.springframework.restdocs:spring-restdocs-asciidoctor' + testImplementation 'com.epages:restdocs-api-spec-mockmvc:0.18.2' +} + +// REST Docs ์Šค๋‹ˆํŽซ ๋””๋ ‰ํ† ๋ฆฌ ์„ค์ • +ext { + snippetsDir = file('build/generated-snippets') } -tasks.named('test') { +tasks.register('unitTest', Test) { + outputs.dir snippetsDir useJUnitPlatform { - // ๊ธฐ๋ณธ์ ์œผ๋กœ๋Š” e2e ํƒœ๊ทธ ์ œ์™ธํ•˜๊ณ  ์‹คํ–‰ - excludeTags 'e2e' + includeTags 'unit' } + systemProperty 'spring.profiles.active', 'test-unit' } +tasks.register('integrationTest', Test) { + outputs.dir snippetsDir + useJUnitPlatform { + includeTags 'integration' + } + + systemProperty 'spring.profiles.active', 'test-integration' + + timeout = Duration.ofMinutes(10) +} + // E2E ํ…Œ์ŠคํŠธ ์ „์šฉ task ์ถ”๊ฐ€ tasks.register('e2eTest', Test) { + outputs.dir snippetsDir useJUnitPlatform { includeTags 'e2e' } @@ -96,15 +124,41 @@ tasks.register('e2eTest', Test) { timeout = Duration.ofMinutes(10) } -// ๋ชจ๋“  ํ…Œ์ŠคํŠธ ์‹คํ–‰ task -tasks.register('allTests', Test) { - useJUnitPlatform() +// AsciiDoctor ์„ค์ • (REST Docs ๋ฌธ์„œ ์ƒ์„ฑ) +asciidoctor { + inputs.dir snippetsDir + configurations 'asciidoctorExt' + dependsOn test + + baseDirFollowsSourceDir() + + attributes( + 'snippets': snippetsDir, + 'source-highlighter': 'coderay', + 'toc': 'left', + 'toclevels': '3', + 'sectlinks': 'true', + 'operation-curl-request-title': 'Example request', + 'operation-http-response-title': 'Example response' + ) +} + +asciidoctor.doFirst { + delete file('src/docs/asciidoc') +} + +// JAR์— ์ƒ์„ฑ๋œ ๋ฌธ์„œ ํฌํ•จ +bootJar { + dependsOn asciidoctor + from ("${asciidoctor.outputDir}/html5") { + into 'static/docs' + } } spotless { java { googleJavaFormat('1.17.0') - importOrder('java', 'javax', 'org', 'com', '', 'com.movement') + importOrder('java', 'javax', 'org', 'com', '', 'site.icebang') endWithNewline() removeUnusedImports() encoding('UTF-8') @@ -118,3 +172,11 @@ spotless { endWithNewline() } } + +openapi3 { + server = 'http://localhost:8080' + title = 'IceBang API' + description = 'IceBang API Documentation' + version = '0.0.1-alpha-snapshot' + format = 'yaml' +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java deleted file mode 100644 index 4a2fff36..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.gltkorea.icebang.config.security; - -import java.security.SecureRandom; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.env.Environment; -import org.springframework.security.config.annotation.web.builders.HttpSecurity; -import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer; -import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; -import org.springframework.security.crypto.password.NoOpPasswordEncoder; -import org.springframework.security.crypto.password.PasswordEncoder; -import 
org.springframework.security.web.SecurityFilterChain; - -import com.gltkorea.icebang.config.security.endpoints.SecurityEndpoints; - -import lombok.RequiredArgsConstructor; - -@Configuration -@RequiredArgsConstructor -public class SecurityConfig { - private final Environment environment; - - @Bean - public SecureRandom secureRandom() { - return new SecureRandom(); - } - - @Bean - public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { - return http.authorizeHttpRequests( - auth -> - auth.requestMatchers(SecurityEndpoints.PUBLIC.getMatchers()) - .permitAll() - .requestMatchers("/auth/login", "/auth/logout") - .permitAll() - .requestMatchers(SecurityEndpoints.DATA_ADMIN.getMatchers()) - .hasAuthority("SUPER_ADMIN") - .requestMatchers(SecurityEndpoints.DATA_ENGINEER.getMatchers()) - .hasAnyAuthority( - "SUPER_ADMIN", "ADMIN", "SENIOR_DATA_ENGINEER", "DATA_ENGINEER") - .requestMatchers(SecurityEndpoints.ANALYST.getMatchers()) - .hasAnyAuthority( - "SUPER_ADMIN", - "ADMIN", - "SENIOR_DATA_ENGINEER", - "DATA_ENGINEER", - "SENIOR_DATA_ANALYST", - "DATA_ANALYST", - "VIEWER") - .requestMatchers(SecurityEndpoints.OPS.getMatchers()) - .hasAnyAuthority( - "SUPER_ADMIN", "ADMIN", "SENIOR_DATA_ENGINEER", "DATA_ENGINEER") - .requestMatchers(SecurityEndpoints.USER.getMatchers()) - .authenticated() - .anyRequest() - .authenticated()) - .formLogin(AbstractHttpConfigurer::disable) - .logout( - logout -> logout.logoutUrl("/auth/logout").logoutSuccessUrl("/auth/login").permitAll()) - .csrf(AbstractHttpConfigurer::disable) // API ์‚ฌ์šฉ์„ ์œ„ํ•ด CSRF ๋น„ํ™œ์„ฑํ™” - .build(); - } - - @Bean - public PasswordEncoder bCryptPasswordEncoder() { - String[] activeProfiles = environment.getActiveProfiles(); - - for (String profile : activeProfiles) { - if ("dev".equals(profile) || "test".equals(profile)) { - return NoOpPasswordEncoder.getInstance(); - } - } - return new BCryptPasswordEncoder(); - } -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java deleted file mode 100644 index 5da466f6..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.gltkorea.icebang.domain.auth.controller; - -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; - -import com.gltkorea.icebang.common.dto.ApiResponse; -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.auth.service.AuthService; - -import jakarta.validation.Valid; -import lombok.RequiredArgsConstructor; - -@RestController -@RequestMapping("/v0/auth") -@RequiredArgsConstructor -public class AuthController { - private final AuthService authService; - - @PostMapping("/register") - @ResponseStatus(HttpStatus.CREATED) - public ApiResponse register(@Valid @RequestBody RegisterDto registerDto) { - authService.registerUser(registerDto); - return ApiResponse.success(null); - } -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java deleted file mode 100644 index ac0b6663..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.gltkorea.icebang.domain.email.service; - -import 
com.gltkorea.icebang.domain.email.dto.EmailRequest; - -public interface EmailService { - void send(EmailRequest emailRequest); -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionsDto.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionsDto.java deleted file mode 100644 index c416d811..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionsDto.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.gltkorea.icebang.domain.organization.dto; - -import java.util.List; - -import com.gltkorea.icebang.domain.department.dto.DepartmentsCardDto; -import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RolesCardDto; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; - -@Builder -@Data -@AllArgsConstructor -public class OrganizationOptionsDto { - List departments; - List positions; - List roles; -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java deleted file mode 100644 index 4cebdfe5..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.gltkorea.icebang.domain.organization.service; - -import java.math.BigInteger; -import java.util.List; - -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.gltkorea.icebang.domain.department.dto.DepartmentsCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionsDto; -import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RolesCardDto; -import com.gltkorea.icebang.mapper.OrganizationMapper; - -import lombok.RequiredArgsConstructor; - -@Service -@RequiredArgsConstructor -public class OrganizationService { - private final OrganizationMapper organizationMapper; - - @Transactional(readOnly = true) - public List getAllOrganizationList() { - return organizationMapper.findAllOrganizations(); - } - - public OrganizationOptionsDto getOrganizationOptions(BigInteger id) { - List departments = organizationMapper.findDepartmentsByOrganizationId(id); - List positions = organizationMapper.findPositionsByOrganizationId(id); - List roles = organizationMapper.findRolesByOrganizationId(id); - - return OrganizationOptionsDto.builder() - .departments(departments) - .positions(positions) - .roles(roles) - .build(); - } -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java deleted file mode 100644 index e6b07bce..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.gltkorea.icebang.domain.user.controller; - -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -import 
com.gltkorea.icebang.common.dto.ApiResponse; -import com.gltkorea.icebang.domain.user.dto.CheckEmailRequest; -import com.gltkorea.icebang.domain.user.dto.CheckEmailResponse; -import com.gltkorea.icebang.domain.user.service.UserService; - -import jakarta.validation.Valid; -import lombok.RequiredArgsConstructor; - -@RestController -@RequestMapping("/v0/users") -@RequiredArgsConstructor -public class UserController { - private final UserService userService; - - @PostMapping("/check-email") - public ApiResponse checkEmailAvailable( - @Valid @RequestBody CheckEmailRequest request) { - Boolean available = !userService.isExistEmail(request); - String message = available.equals(Boolean.TRUE) ? "์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ์ด๋ฉ”์ผ์ž…๋‹ˆ๋‹ค." : "์ด๋ฏธ ๊ฐ€์ž…๋œ ์ด๋ฉ”์ผ์ž…๋‹ˆ๋‹ค."; - - return ApiResponse.success(CheckEmailResponse.builder().available(available).build(), message); - } -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java deleted file mode 100644 index fcf87ac9..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.gltkorea.icebang.domain.user.service; - -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.user.dto.CheckEmailRequest; -import com.gltkorea.icebang.entity.Users; -import com.gltkorea.icebang.mapper.UserMapper; - -import jakarta.validation.Valid; -import lombok.RequiredArgsConstructor; - -@Service -@RequiredArgsConstructor -public class UserService { - private final UserMapper userMapper; - - public void registerUser(RegisterDto registerDto) { - Users user = - Users.builder() - .name(registerDto.getName()) - .email(registerDto.getEmail()) - .password(registerDto.getPassword()) - .status("PENDING") - .build(); - } - - @Transactional(readOnly = true) - public Boolean isExistEmail(@Valid CheckEmailRequest request) { - return userMapper.existsByEmail(request.getEmail()); - } -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java b/apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java deleted file mode 100644 index 44f30244..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.gltkorea.icebang.entity; - -import java.math.BigInteger; -import java.time.LocalDateTime; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder -@AllArgsConstructor -@Deprecated -public class Users { - private BigInteger id; - private String name; - private String email; - private String password; - private String status; - private LocalDateTime createdAt; - private LocalDateTime updatedAt; -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java b/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java deleted file mode 100644 index 2643af9f..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.gltkorea.icebang.mapper; - -import java.math.BigInteger; -import java.util.List; - -import org.apache.ibatis.annotations.Mapper; -import org.apache.ibatis.annotations.Param; - -import 
com.gltkorea.icebang.domain.department.dto.DepartmentsCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RolesCardDto; - -@Mapper -public interface OrganizationMapper { - List findAllOrganizations(); - - List findDepartmentsByOrganizationId( - @Param("organizationId") BigInteger organizationId); - - List findPositionsByOrganizationId( - @Param("organizationId") BigInteger organizationId); - - List findRolesByOrganizationId(@Param("organizationId") BigInteger organizationId); -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/UserServiceApplication.java b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java similarity index 88% rename from apps/user-service/src/main/java/com/gltkorea/icebang/UserServiceApplication.java rename to apps/user-service/src/main/java/site/icebang/UserServiceApplication.java index 002a6bc4..68da9f2a 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/UserServiceApplication.java +++ b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang; +package site.icebang; import org.mybatis.spring.annotation.MapperScan; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; @@ -9,7 +9,7 @@ @EnableScheduling @EnableBatchProcessing @SpringBootApplication -@MapperScan("com.gltkorea.icebang.mapper") +@MapperScan("site.icebang.**.mapper") public class UserServiceApplication { public static void main(String[] args) { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/job/BlogContentJobConfig.java b/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java similarity index 91% rename from apps/user-service/src/main/java/com/gltkorea/icebang/batch/job/BlogContentJobConfig.java rename to apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java index 61626411..5e85fe9f 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/job/BlogContentJobConfig.java +++ b/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.batch.job; +package site.icebang.batch.job; import org.springframework.batch.core.Job; import org.springframework.batch.core.Step; @@ -9,11 +9,11 @@ import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; -import com.gltkorea.icebang.batch.tasklet.ContentGenerationTasklet; -import com.gltkorea.icebang.batch.tasklet.KeywordExtractionTasklet; - import lombok.RequiredArgsConstructor; +import site.icebang.batch.tasklet.ContentGenerationTasklet; +import site.icebang.batch.tasklet.KeywordExtractionTasklet; + @Configuration @RequiredArgsConstructor public class BlogContentJobConfig { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/ContentGenerationTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/ContentGenerationTasklet.java rename to apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java index 5cc8918a..a6ef4505 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/ContentGenerationTasklet.java +++ 
b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.batch.tasklet; +package site.icebang.batch.tasklet; import java.util.List; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/KeywordExtractionTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/KeywordExtractionTasklet.java rename to apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java index 520403b3..ebc27117 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/KeywordExtractionTasklet.java +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.batch.tasklet; +package site.icebang.batch.tasklet; import java.util.List; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/dto/ApiResponse.java b/apps/user-service/src/main/java/site/icebang/common/dto/ApiResponse.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/common/dto/ApiResponse.java rename to apps/user-service/src/main/java/site/icebang/common/dto/ApiResponse.java index 7cf5edb3..0f99e59b 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/common/dto/ApiResponse.java +++ b/apps/user-service/src/main/java/site/icebang/common/dto/ApiResponse.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.common.dto; +package site.icebang.common.dto; import org.springframework.http.HttpStatus; diff --git a/apps/user-service/src/main/java/site/icebang/common/health/api/HealthCheckController.java b/apps/user-service/src/main/java/site/icebang/common/health/api/HealthCheckController.java new file mode 100644 index 00000000..8b65e7a0 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/common/health/api/HealthCheckController.java @@ -0,0 +1,34 @@ +package site.icebang.common.health.api; + +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +import lombok.RequiredArgsConstructor; + +import site.icebang.common.health.service.FastApiClient; + +@RestController +@RequiredArgsConstructor +public class HealthCheckController { + + private final FastApiClient fastApiClient; + + /** + * Spring Boot์™€ FastAPI ์„œ๋ฒ„ ๊ฐ„์˜ ์—ฐ๊ฒฐ ์ƒํƒœ๋ฅผ ํ™•์ธํ•˜๋Š” ํ—ฌ์Šค ์ฒดํฌ API + * + * @return FastAPI ์„œ๋ฒ„๋กœ๋ถ€ํ„ฐ์˜ ์‘๋‹ต + */ + @GetMapping("/ping") + public ResponseEntity pingFastApi() { + String result = fastApiClient.ping(); + + if (result.startsWith("ERROR")) { + // FastAPI ์—ฐ๊ฒฐ ์‹คํŒจ ์‹œ 503 Service Unavailable ์ƒํƒœ ์ฝ”๋“œ์™€ ํ•จ๊ป˜ ์—๋Ÿฌ ๋ฉ”์‹œ์ง€ ๋ฐ˜ํ™˜ + return ResponseEntity.status(503).body(result); + } + + // ์„ฑ๊ณต ์‹œ 200 OK ์ƒํƒœ ์ฝ”๋“œ์™€ ํ•จ๊ป˜ FastAPI๋กœ๋ถ€ํ„ฐ ๋ฐ›์€ ์‘๋‹ต("PONG" ๋“ฑ) ๋ฐ˜ํ™˜ + return ResponseEntity.ok(result); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/common/health/service/FastApiClient.java b/apps/user-service/src/main/java/site/icebang/common/health/service/FastApiClient.java new file mode 100644 index 00000000..8d8ff496 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/common/health/service/FastApiClient.java @@ -0,0 +1,42 @@ +package site.icebang.common.health.service; + +import org.springframework.stereotype.Service; 
+import org.springframework.web.client.RestClientException; +import org.springframework.web.client.RestTemplate; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Service +@RequiredArgsConstructor +public class FastApiClient { + + // WebConfig์—์„œ ์ƒ์„ฑํ•˜๊ณ  ํƒ€์ž„์•„์›ƒ์ด ์„ค์ •๋œ RestTemplate Bean์„ ์ฃผ์ž…๋ฐ›์Šต๋‹ˆ๋‹ค. + private final RestTemplate restTemplate; + + // FastAPI ์„œ๋ฒ„์˜ ping ์—”๋“œํฌ์ธํŠธ URL์„ ์ƒ์ˆ˜๋กœ ํ•˜๋“œ์ฝ”๋”ฉํ•ฉ๋‹ˆ๋‹ค. + private static final String FASTAPI_PING_URL = "http://localhost:8000/ping"; + + /** + * FastAPI ์„œ๋ฒ„์˜ /ping ์—”๋“œํฌ์ธํŠธ๋ฅผ ํ˜ธ์ถœํ•˜์—ฌ ์—ฐ๊ฒฐ์„ ํ…Œ์ŠคํŠธํ•ฉ๋‹ˆ๋‹ค. + * + * @return ์—ฐ๊ฒฐ ์„ฑ๊ณต ์‹œ FastAPI๋กœ๋ถ€ํ„ฐ ๋ฐ›์€ ์‘๋‹ต, ์‹คํŒจ ์‹œ ์—๋Ÿฌ ๋ฉ”์‹œ์ง€ + */ + public String ping() { + log.info("Attempting to connect to FastAPI server at: {}", FASTAPI_PING_URL); + + try { + // FastAPI ์„œ๋ฒ„์— GET ์š”์ฒญ์„ ๋ณด๋‚ด๊ณ , ์‘๋‹ต์„ String์œผ๋กœ ๋ฐ›์Šต๋‹ˆ๋‹ค. + // WebConfig์— ์„ค์ •๋œ 5์ดˆ ํƒ€์ž„์•„์›ƒ์ด ์—ฌ๊ธฐ์„œ ์ ์šฉ๋ฉ๋‹ˆ๋‹ค. + String response = restTemplate.getForObject(FASTAPI_PING_URL, String.class); + log.info("Successfully received response from FastAPI: {}", response); + return response; + } catch (RestClientException e) { + // RestClientException์€ ์—ฐ๊ฒฐ ์‹คํŒจ, ํƒ€์ž„์•„์›ƒ ๋“ฑ ๋ชจ๋“  ํ†ต์‹  ์˜ค๋ฅ˜๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + log.error( + "Failed to connect to FastAPI server at {}. Error: {}", FASTAPI_PING_URL, e.getMessage()); + return "ERROR: Cannot connect to FastAPI"; + } + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/utils/RandomPasswordGenerator.java b/apps/user-service/src/main/java/site/icebang/common/utils/RandomPasswordGenerator.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/common/utils/RandomPasswordGenerator.java rename to apps/user-service/src/main/java/site/icebang/common/utils/RandomPasswordGenerator.java index 3716e5b6..c77189c2 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/common/utils/RandomPasswordGenerator.java +++ b/apps/user-service/src/main/java/site/icebang/common/utils/RandomPasswordGenerator.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.common.utils; +package site.icebang.common.utils; import java.security.SecureRandom; import java.util.Collections; diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java new file mode 100644 index 00000000..d0a98142 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java @@ -0,0 +1,64 @@ +package site.icebang.domain.auth.controller; + +import org.springframework.http.HttpStatus; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.annotation.AuthenticationPrincipal; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.web.context.HttpSessionSecurityContextRepository; +import org.springframework.web.bind.annotation.*; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpSession; +import jakarta.validation.Valid; +import lombok.RequiredArgsConstructor; + +import site.icebang.common.dto.ApiResponse; +import 
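FastApiClient above relies on a RestTemplate bean that, per its comments, WebConfig creates with a 5-second timeout; that bean definition is not shown in this changeset. A minimal sketch of what such a configuration could look like, assuming Spring Boot's RestTemplateBuilder is available (names and values are illustrative, not taken from the actual WebConfig):

package site.icebang.global.config;

import java.time.Duration;

import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;

@Configuration
public class WebConfig {

  // Illustrative only: connect/read timeouts matching the 5-second limit
  // mentioned in FastApiClient's comments. The real WebConfig may differ.
  @Bean
  public RestTemplate restTemplate(RestTemplateBuilder builder) {
    return builder
        .setConnectTimeout(Duration.ofSeconds(5))
        .setReadTimeout(Duration.ofSeconds(5))
        .build();
  }
}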
site.icebang.domain.auth.dto.LoginRequestDto; +import site.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.model.AuthCredential; +import site.icebang.domain.auth.service.AuthService; + +@RestController +@RequestMapping("/v0/auth") +@RequiredArgsConstructor +public class AuthController { + private final AuthService authService; + private final AuthenticationManager authenticationManager; + + @PostMapping("/register") + @ResponseStatus(HttpStatus.CREATED) + public ApiResponse register(@Valid @RequestBody RegisterDto registerDto) { + authService.registerUser(registerDto); + return ApiResponse.success(null); + } + + @PostMapping("/login") + public ApiResponse login( + @RequestBody LoginRequestDto request, HttpServletRequest httpRequest) { + UsernamePasswordAuthenticationToken token = + new UsernamePasswordAuthenticationToken(request.getEmail(), request.getPassword()); + + Authentication auth = authenticationManager.authenticate(token); + + SecurityContextHolder.getContext().setAuthentication(auth); + + HttpSession session = httpRequest.getSession(true); + session.setAttribute( + HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, + SecurityContextHolder.getContext()); + + return ApiResponse.success(null); + } + + @GetMapping("/check-session") + public ApiResponse checkSession(@AuthenticationPrincipal AuthCredential user) { + return ApiResponse.success(user != null); + } + + @GetMapping("/permissions") + public ApiResponse getPermissions(@AuthenticationPrincipal AuthCredential user) { + return ApiResponse.success(user); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/dto/LoginRequestDto.java b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/LoginRequestDto.java new file mode 100644 index 00000000..f3b64e0b --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/LoginRequestDto.java @@ -0,0 +1,15 @@ +package site.icebang.domain.auth.dto; + +import jakarta.validation.constraints.Email; +import jakarta.validation.constraints.NotBlank; +import lombok.Data; + +@Data +public class LoginRequestDto { + @NotBlank(message = "์ด๋ฉ”์ผ์€ ํ•„์ˆ˜์ž…๋‹ˆ๋‹ค") + @Email(message = "์˜ฌ๋ฐ”๋ฅธ ์ด๋ฉ”์ผ ํ˜•์‹์ด ์•„๋‹™๋‹ˆ๋‹ค") + private String email; + + @NotBlank(message = "๋น„๋ฐƒ๋ฒˆํ˜ธ๋Š” ํ•„์ˆ˜์ž…๋‹ˆ๋‹ค") + private String password; +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/RegisterDto.java b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/RegisterDto.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/RegisterDto.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/dto/RegisterDto.java index 1ff305aa..58cef092 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/RegisterDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/RegisterDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.auth.dto; +package site.icebang.domain.auth.dto; import java.math.BigInteger; import java.util.Set; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java b/apps/user-service/src/main/java/site/icebang/domain/auth/mapper/AuthMapper.java similarity index 62% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/mapper/AuthMapper.java index 09033730..ddc07ffe 100644 --- 
a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/mapper/AuthMapper.java @@ -1,11 +1,14 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.auth.mapper; import org.apache.ibatis.annotations.Mapper; -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.model.AuthCredential; @Mapper public interface AuthMapper { + AuthCredential findUserByEmail(String email); + boolean existsByEmail(String email); int insertUser(RegisterDto dto); // users insert diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/model/AuthCredential.java b/apps/user-service/src/main/java/site/icebang/domain/auth/model/AuthCredential.java new file mode 100644 index 00000000..22ef38f2 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/model/AuthCredential.java @@ -0,0 +1,81 @@ +package site.icebang.domain.auth.model; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.core.userdetails.UserDetails; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class AuthCredential implements UserDetails { + + private BigInteger id; + private String email; + private String password; + private String status; + + // roles -> Spring Security authority๋กœ ๋ณ€ํ™˜ + private List roles; + + // MyBatis GROUP_CONCAT ๊ฒฐ๊ณผ๋ฅผ List์œผ๋กœ ๋ณ€ํ™˜ํ•˜๋Š” setter + public void setRoles(String rolesString) { + if (rolesString != null && !rolesString.trim().isEmpty()) { + this.roles = Arrays.asList(rolesString.split(",")); + } else { + this.roles = new ArrayList<>(); + } + } + + public void setRoles(List roles) { + this.roles = roles; + } + + public List getRoles() { + return roles != null ? 
roles : new ArrayList<>(); + } + + @Override + public Collection getAuthorities() { + return getRoles().stream() + .map(role -> new SimpleGrantedAuthority("ROLE_" + role.trim())) // ROLE_ prefix ์ถ”๊ฐ€ + ๊ณต๋ฐฑ ์ œ๊ฑฐ + .collect(Collectors.toList()); + } + + @Override + public String getUsername() { + return email; // ๋กœ๊ทธ์ธ ID๋Š” email + } + + @Override + public boolean isAccountNonExpired() { + return true; // ํ•„์š” ์‹œ status ๊ธฐ๋ฐ˜์œผ๋กœ ๋ณ€๊ฒฝ ๊ฐ€๋Šฅ + } + + @Override + public boolean isAccountNonLocked() { + return !"LOCKED".equalsIgnoreCase(status); + } + + @Override + public boolean isCredentialsNonExpired() { + return true; + } + + @Override + public boolean isEnabled() { + return !"DISABLED".equalsIgnoreCase(status); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthCredentialAdapter.java b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthCredentialAdapter.java new file mode 100644 index 00000000..86498143 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthCredentialAdapter.java @@ -0,0 +1,28 @@ +package site.icebang.domain.auth.service; + +import org.springframework.security.core.userdetails.UserDetails; +import org.springframework.security.core.userdetails.UserDetailsService; +import org.springframework.security.core.userdetails.UsernameNotFoundException; +import org.springframework.stereotype.Service; + +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.auth.mapper.AuthMapper; +import site.icebang.domain.auth.model.AuthCredential; + +@Service +@RequiredArgsConstructor +public class AuthCredentialAdapter implements UserDetailsService { + private final AuthMapper authMapper; + + @Override + public UserDetails loadUserByUsername(String email) throws UsernameNotFoundException { + AuthCredential user = authMapper.findUserByEmail(email); + + if (user == null) { + throw new UsernameNotFoundException("User not found with email: " + email); + } + + return user; + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthService.java b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java similarity index 81% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthService.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java index 18010ed5..091861b2 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java @@ -1,17 +1,17 @@ -package com.gltkorea.icebang.domain.auth.service; +package site.icebang.domain.auth.service; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import com.gltkorea.icebang.common.utils.RandomPasswordGenerator; -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.email.dto.EmailRequest; -import com.gltkorea.icebang.domain.email.service.EmailService; -import com.gltkorea.icebang.mapper.AuthMapper; - import lombok.RequiredArgsConstructor; +import site.icebang.common.utils.RandomPasswordGenerator; +import site.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.mapper.AuthMapper; +import site.icebang.domain.email.dto.EmailRequest; +import site.icebang.domain.email.service.EmailService; + @Service 
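AuthCredential keeps a String overload of setRoles so that a GROUP_CONCAT column returned by MyBatis can be mapped straight onto the roles list. A small illustrative snippet (not part of this changeset) showing how such a value ends up as ROLE_-prefixed authorities:

package site.icebang.domain.auth.model;

// Illustrative sketch only: demonstrates the comma-separated roles mapping.
public class AuthCredentialRolesExample {
  public static void main(String[] args) {
    AuthCredential credential =
        AuthCredential.builder().email("user@icebang.site").status("ACTIVE").build();

    // MyBatis would invoke this setter when the query returns "ADMIN, USER".
    credential.setRoles("ADMIN, USER");

    // Prints ROLE_ADMIN and ROLE_USER: the setter splits on ',' and getAuthorities() trims.
    credential.getAuthorities().forEach(a -> System.out.println(a.getAuthority()));
  }
}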
@RequiredArgsConstructor @Transactional diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentsCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/department/dto/DepartmentCardDo.java similarity index 69% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentsCardDto.java rename to apps/user-service/src/main/java/site/icebang/domain/department/dto/DepartmentCardDo.java index 5f50fabd..7644eb8e 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentsCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/department/dto/DepartmentCardDo.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.department.dto; +package site.icebang.domain.department.dto; import java.math.BigInteger; @@ -9,7 +9,7 @@ @Data @Builder @AllArgsConstructor -public class DepartmentsCardDto { +public class DepartmentCardDo { private BigInteger id; private String name; } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/dto/EmailRequest.java b/apps/user-service/src/main/java/site/icebang/domain/email/dto/EmailRequest.java similarity index 85% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/dto/EmailRequest.java rename to apps/user-service/src/main/java/site/icebang/domain/email/dto/EmailRequest.java index fbd25749..89898055 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/dto/EmailRequest.java +++ b/apps/user-service/src/main/java/site/icebang/domain/email/dto/EmailRequest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.email.dto; +package site.icebang.domain.email.dto; import java.util.List; diff --git a/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java new file mode 100644 index 00000000..51646cc3 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java @@ -0,0 +1,7 @@ +package site.icebang.domain.email.service; + +import site.icebang.domain.email.dto.EmailRequest; + +public interface EmailService { + void send(EmailRequest emailRequest); +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailServiceImpl.java b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailServiceImpl.java similarity index 77% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailServiceImpl.java rename to apps/user-service/src/main/java/site/icebang/domain/email/service/EmailServiceImpl.java index a992de13..82047271 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailServiceImpl.java +++ b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailServiceImpl.java @@ -1,12 +1,12 @@ -package com.gltkorea.icebang.domain.email.service; +package site.icebang.domain.email.service; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.email.dto.EmailRequest; - import lombok.RequiredArgsConstructor; +import site.icebang.domain.email.dto.EmailRequest; + @Service @RequiredArgsConstructor @ConditionalOnMissingBean(EmailService.class) diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java 
b/apps/user-service/src/main/java/site/icebang/domain/email/service/MockEmailService.java similarity index 72% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java rename to apps/user-service/src/main/java/site/icebang/domain/email/service/MockEmailService.java index 6ccaffc9..d4392fe5 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/email/service/MockEmailService.java @@ -1,14 +1,14 @@ -package com.gltkorea.icebang.domain.email.service; +package site.icebang.domain.email.service; import org.springframework.context.annotation.Profile; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.email.dto.EmailRequest; - import lombok.extern.slf4j.Slf4j; +import site.icebang.domain.email.dto.EmailRequest; + @Service -@Profile({"test-unit", "test-e2e", "local", "develop"}) +@Profile({"test-unit", "test-e2e", "test-integration", "local", "develop"}) @Slf4j public class MockEmailService implements EmailService { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java b/apps/user-service/src/main/java/site/icebang/domain/organization/controller/OrganizationController.java similarity index 68% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java rename to apps/user-service/src/main/java/site/icebang/domain/organization/controller/OrganizationController.java index ff3567b9..16ccbb65 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/controller/OrganizationController.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.organization.controller; +package site.icebang.domain.organization.controller; import java.math.BigInteger; import java.util.List; @@ -9,13 +9,13 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; -import com.gltkorea.icebang.common.dto.ApiResponse; -import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionsDto; -import com.gltkorea.icebang.domain.organization.service.OrganizationService; - import lombok.RequiredArgsConstructor; +import site.icebang.common.dto.ApiResponse; +import site.icebang.domain.organization.dto.OrganizationCardDto; +import site.icebang.domain.organization.dto.OrganizationOptionDto; +import site.icebang.domain.organization.service.OrganizationService; + @RequestMapping("/v0/organizations") @RequiredArgsConstructor @RestController @@ -28,7 +28,7 @@ public ResponseEntity>> getOrganizations() } @GetMapping("/{id}/options") - public ResponseEntity> getOrganizationDetails( + public ResponseEntity> getOrganizationDetails( @PathVariable BigInteger id) { return ResponseEntity.ok(ApiResponse.success(organizationService.getOrganizationOptions(id))); } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationCardDto.java similarity index 81% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationCardDto.java rename to 
apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationCardDto.java index af0ef64b..a957adc0 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationCardDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.organization.dto; +package site.icebang.domain.organization.dto; import java.math.BigInteger; diff --git a/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationOptionDto.java b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationOptionDto.java new file mode 100644 index 00000000..d7e670eb --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationOptionDto.java @@ -0,0 +1,20 @@ +package site.icebang.domain.organization.dto; + +import java.util.List; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +import site.icebang.domain.department.dto.DepartmentCardDo; +import site.icebang.domain.position.dto.PositionCardDto; +import site.icebang.domain.roles.dto.RoleCardDto; + +@Builder +@Data +@AllArgsConstructor +public class OrganizationOptionDto { + List departments; + List positions; + List roles; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/organization/mapper/OrganizationMapper.java b/apps/user-service/src/main/java/site/icebang/domain/organization/mapper/OrganizationMapper.java new file mode 100644 index 00000000..ed504cca --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/mapper/OrganizationMapper.java @@ -0,0 +1,25 @@ +package site.icebang.domain.organization.mapper; + +import java.math.BigInteger; +import java.util.List; + +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import site.icebang.domain.department.dto.DepartmentCardDo; +import site.icebang.domain.organization.dto.OrganizationCardDto; +import site.icebang.domain.position.dto.PositionCardDto; +import site.icebang.domain.roles.dto.RoleCardDto; + +@Mapper +public interface OrganizationMapper { + List findAllOrganizations(); + + List findDepartmentsByOrganizationId( + @Param("organizationId") BigInteger organizationId); + + List findPositionsByOrganizationId( + @Param("organizationId") BigInteger organizationId); + + List findRolesByOrganizationId(@Param("organizationId") BigInteger organizationId); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/organization/service/OrganizationService.java b/apps/user-service/src/main/java/site/icebang/domain/organization/service/OrganizationService.java new file mode 100644 index 00000000..cc035935 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/service/OrganizationService.java @@ -0,0 +1,39 @@ +package site.icebang.domain.organization.service; + +import java.math.BigInteger; +import java.util.List; + +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.department.dto.DepartmentCardDo; +import site.icebang.domain.organization.dto.OrganizationCardDto; +import site.icebang.domain.organization.dto.OrganizationOptionDto; +import site.icebang.domain.organization.mapper.OrganizationMapper; +import site.icebang.domain.position.dto.PositionCardDto; +import site.icebang.domain.roles.dto.RoleCardDto; + 
+@Service +@RequiredArgsConstructor +public class OrganizationService { + private final OrganizationMapper organizationMapper; + + @Transactional(readOnly = true) + public List getAllOrganizationList() { + return organizationMapper.findAllOrganizations(); + } + + public OrganizationOptionDto getOrganizationOptions(BigInteger id) { + List departments = organizationMapper.findDepartmentsByOrganizationId(id); + List positions = organizationMapper.findPositionsByOrganizationId(id); + List roles = organizationMapper.findRolesByOrganizationId(id); + + return OrganizationOptionDto.builder() + .departments(departments) + .positions(positions) + .roles(roles) + .build(); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/position/dto/PositionCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/position/dto/PositionCardDto.java similarity index 84% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/position/dto/PositionCardDto.java rename to apps/user-service/src/main/java/site/icebang/domain/position/dto/PositionCardDto.java index e97d7d3f..104b0cab 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/position/dto/PositionCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/position/dto/PositionCardDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.position.dto; +package site.icebang.domain.position.dto; import java.math.BigInteger; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RolesCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/roles/dto/RoleCardDto.java similarity index 78% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RolesCardDto.java rename to apps/user-service/src/main/java/site/icebang/domain/roles/dto/RoleCardDto.java index 709a08ff..737c8ed4 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RolesCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/roles/dto/RoleCardDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.roles.dto; +package site.icebang.domain.roles.dto; import java.math.BigInteger; @@ -11,7 +11,7 @@ @Builder @AllArgsConstructor @NoArgsConstructor -public class RolesCardDto { +public class RoleCardDto { private BigInteger id; private String name; private String description; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/ScheduleMapper.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java similarity index 63% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/ScheduleMapper.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java index 7220dc9e..c757fc36 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/ScheduleMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java @@ -1,10 +1,10 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.schedule.mapper; import java.util.List; import org.apache.ibatis.annotations.Mapper; -import com.gltkorea.icebang.domain.schedule.model.Schedule; +import site.icebang.domain.schedule.model.Schedule; @Mapper public interface ScheduleMapper { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java similarity index 81% rename from 
apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/model/Schedule.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java index b9400b88..65c48366 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/model/Schedule.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.schedule.model; +package site.icebang.domain.schedule.model; import lombok.Getter; import lombok.Setter; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/runner/SchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java similarity index 77% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/runner/SchedulerInitializer.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java index 7f96bba8..0dfb8b33 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/runner/SchedulerInitializer.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.schedule.runner; +package site.icebang.domain.schedule.runner; import java.util.List; @@ -6,13 +6,13 @@ import org.springframework.boot.ApplicationRunner; import org.springframework.stereotype.Component; -import com.gltkorea.icebang.domain.schedule.model.Schedule; -import com.gltkorea.icebang.domain.schedule.service.DynamicSchedulerService; -import com.gltkorea.icebang.mapper.ScheduleMapper; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import site.icebang.domain.schedule.mapper.ScheduleMapper; +import site.icebang.domain.schedule.model.Schedule; +import site.icebang.domain.schedule.service.DynamicSchedulerService; + @Slf4j @Component @RequiredArgsConstructor diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/service/DynamicSchedulerService.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java similarity index 95% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/service/DynamicSchedulerService.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java index a8bbeff1..372e0e1d 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/service/DynamicSchedulerService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.schedule.service; +package site.icebang.domain.schedule.service; import java.time.LocalDateTime; import java.util.Map; @@ -13,11 +13,11 @@ import org.springframework.scheduling.support.CronTrigger; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.schedule.model.Schedule; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import site.icebang.domain.schedule.model.Schedule; + @Slf4j @Service @RequiredArgsConstructor diff --git a/apps/user-service/src/main/java/site/icebang/domain/user/controller/UserController.java b/apps/user-service/src/main/java/site/icebang/domain/user/controller/UserController.java new file mode 100644 index 00000000..db9b3fcf --- /dev/null +++ 
b/apps/user-service/src/main/java/site/icebang/domain/user/controller/UserController.java @@ -0,0 +1,36 @@ +package site.icebang.domain.user.controller; + +import org.springframework.security.core.annotation.AuthenticationPrincipal; +import org.springframework.web.bind.annotation.*; + +import jakarta.validation.Valid; +import lombok.RequiredArgsConstructor; + +import site.icebang.common.dto.ApiResponse; +import site.icebang.domain.auth.model.AuthCredential; +import site.icebang.domain.user.dto.CheckEmailRequest; +import site.icebang.domain.user.dto.CheckEmailResponse; +import site.icebang.domain.user.dto.UserProfileResponseDto; +import site.icebang.domain.user.service.UserService; + +@RestController +@RequestMapping("/v0/users") +@RequiredArgsConstructor +public class UserController { + private final UserService userService; + + @PostMapping("/check-email") + public ApiResponse checkEmailAvailable( + @Valid @RequestBody CheckEmailRequest request) { + Boolean available = !userService.isExistEmail(request); + String message = available.equals(Boolean.TRUE) ? "์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ์ด๋ฉ”์ผ์ž…๋‹ˆ๋‹ค." : "์ด๋ฏธ ๊ฐ€์ž…๋œ ์ด๋ฉ”์ผ์ž…๋‹ˆ๋‹ค."; + + return ApiResponse.success(CheckEmailResponse.builder().available(available).build(), message); + } + + @GetMapping("/me") + public ApiResponse getUserProfile( + @AuthenticationPrincipal AuthCredential user) { + return ApiResponse.success(UserProfileResponseDto.from(user)); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailRequest.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java similarity index 88% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailRequest.java rename to apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java index 49208315..f3b2c2a1 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailRequest.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.user.dto; +package site.icebang.domain.user.dto; import jakarta.validation.constraints.Email; import jakarta.validation.constraints.NotBlank; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailResponse.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailResponse.java similarity index 73% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailResponse.java rename to apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailResponse.java index 8b92d187..adda35d4 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailResponse.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailResponse.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.user.dto; +package site.icebang.domain.user.dto; import lombok.Builder; import lombok.Data; diff --git a/apps/user-service/src/main/java/site/icebang/domain/user/dto/UserProfileResponseDto.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/UserProfileResponseDto.java new file mode 100644 index 00000000..6058f3aa --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/UserProfileResponseDto.java @@ -0,0 +1,30 @@ +package site.icebang.domain.user.dto; + +import java.math.BigInteger; +import java.util.List; + +import lombok.Getter; + +import 
site.icebang.domain.auth.model.AuthCredential; + +@Getter +public class UserProfileResponseDto { + + private final BigInteger id; + private final String email; + private final String name; + private final List roles; + private final String status; + + public UserProfileResponseDto(AuthCredential authCredential) { + this.id = authCredential.getId(); + this.email = authCredential.getEmail(); + this.name = authCredential.getEmail(); // name ํ•„๋“œ๊ฐ€ ์—†์œผ๋ฉด email ์‚ฌ์šฉ + this.roles = authCredential.getRoles(); + this.status = authCredential.getStatus(); + } + + public static UserProfileResponseDto from(AuthCredential authCredential) { + return new UserProfileResponseDto(authCredential); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/UserMapper.java b/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java similarity index 87% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/UserMapper.java rename to apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java index 734fe8d5..d2e14012 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/UserMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.user.mapper; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; diff --git a/apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java b/apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java new file mode 100644 index 00000000..e3dce655 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java @@ -0,0 +1,21 @@ +package site.icebang.domain.user.service; + +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import jakarta.validation.Valid; +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.user.dto.CheckEmailRequest; +import site.icebang.domain.user.mapper.UserMapper; + +@Service +@RequiredArgsConstructor +public class UserService { + private final UserMapper userMapper; + + @Transactional(readOnly = true) + public Boolean isExistEmail(@Valid CheckEmailRequest request) { + return userMapper.existsByEmail(request.getEmail()); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/aop/logging/LoggingAspect.java b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java similarity index 87% rename from apps/user-service/src/main/java/com/gltkorea/icebang/aop/logging/LoggingAspect.java rename to apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java index 0441820d..126c7d35 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/aop/logging/LoggingAspect.java +++ b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.aop.logging; +package site.icebang.global.aop.logging; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; @@ -13,13 +13,13 @@ @Component public class LoggingAspect { - @Pointcut("execution(public * com.gltkorea.icebang..controller..*(..))") + @Pointcut("execution(public * site.icebang..controller..*(..))") public void controllerMethods() {} - @Pointcut("execution(public * com.gltkorea.icebang..service..*(..))") + @Pointcut("execution(public * 
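UserProfileResponseDto.from simply copies fields off the authenticated AuthCredential, with name falling back to the email when no display name is available. A tiny usage sketch (illustrative, not part of this changeset):

package site.icebang.domain.user.dto;

import java.math.BigInteger;

import site.icebang.domain.auth.model.AuthCredential;

// Illustrative sketch only.
public class UserProfileResponseDtoExample {
  public static void main(String[] args) {
    AuthCredential credential =
        AuthCredential.builder()
            .id(BigInteger.ONE)
            .email("user@icebang.site")
            .status("ACTIVE")
            .build();
    credential.setRoles("USER");

    UserProfileResponseDto profile = UserProfileResponseDto.from(credential);

    // name currently mirrors the email, as the constructor comment notes.
    System.out.println(profile.getEmail() + " / " + profile.getName() + " / " + profile.getRoles());
  }
}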
site.icebang..service..*(..))") public void serviceMethods() {} - @Pointcut("execution(public * com.gltkorea.icebang..service..repository..*(..))") + @Pointcut("execution(public * site.icebang..service..mapper..*(..))") public void repositoryMethods() {} @Around("controllerMethods()") diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/WebConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/WebConfig.java rename to apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java index 1ed10098..22fd4be8 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/WebConfig.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config; +package site.icebang.global.config; import java.time.Duration; diff --git a/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/StringListTypeHandler.java b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/StringListTypeHandler.java new file mode 100644 index 00000000..6aba5d96 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/StringListTypeHandler.java @@ -0,0 +1,50 @@ +package site.icebang.global.config.mybatis.typehandler; + +import java.sql.CallableStatement; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.ibatis.type.BaseTypeHandler; +import org.apache.ibatis.type.JdbcType; +import org.apache.ibatis.type.MappedJdbcTypes; +import org.apache.ibatis.type.MappedTypes; + +@MappedTypes(List.class) +@MappedJdbcTypes(JdbcType.VARCHAR) +public class StringListTypeHandler extends BaseTypeHandler> { + + @Override + public void setNonNullParameter( + PreparedStatement ps, int i, List parameter, JdbcType jdbcType) throws SQLException { + ps.setString(i, String.join(",", parameter)); + } + + @Override + public List getNullableResult(ResultSet rs, String columnName) throws SQLException { + String value = rs.getString(columnName); + return convertToList(value); + } + + @Override + public List getNullableResult(ResultSet rs, int columnIndex) throws SQLException { + String value = rs.getString(columnIndex); + return convertToList(value); + } + + @Override + public List getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { + String value = cs.getString(columnIndex); + return convertToList(value); + } + + private List convertToList(String value) { + if (value == null || value.trim().isEmpty()) { + return new ArrayList<>(); + } + return Arrays.asList(value.split(",")); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/scheduler/SchedulerConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java similarity index 95% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/scheduler/SchedulerConfig.java rename to apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java index 592eb0d7..79fc6436 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/scheduler/SchedulerConfig.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java @@ -1,4 +1,4 @@ -package 
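StringListTypeHandler joins a List of strings into a comma-separated column on write and splits it back on read. A quick check of the read path, assuming Mockito is available in the test scope (illustrative only, not part of this changeset):

package site.icebang.global.config.mybatis.typehandler;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;

import org.mockito.Mockito;

// Illustrative sketch only: exercises the ResultSet read path of the handler.
public class StringListTypeHandlerExample {
  public static void main(String[] args) throws SQLException {
    ResultSet rs = Mockito.mock(ResultSet.class);
    Mockito.when(rs.getString("roles")).thenReturn("ADMIN,USER");

    List<String> roles = new StringListTypeHandler().getNullableResult(rs, "roles");
    System.out.println(roles); // [ADMIN, USER]
  }
}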
com.gltkorea.icebang.config.scheduler; +package site.icebang.global.config.scheduler; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java new file mode 100644 index 00000000..457e388d --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java @@ -0,0 +1,129 @@ +package site.icebang.global.config.security; + +import java.security.SecureRandom; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.authentication.dao.DaoAuthenticationProvider; +import org.springframework.security.config.annotation.authentication.configuration.AuthenticationConfiguration; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer; +import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; +import org.springframework.security.crypto.password.NoOpPasswordEncoder; +import org.springframework.security.crypto.password.PasswordEncoder; +import org.springframework.security.web.SecurityFilterChain; +import org.springframework.web.cors.CorsConfiguration; +import org.springframework.web.cors.UrlBasedCorsConfigurationSource; +import org.springframework.web.filter.CorsFilter; + +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.auth.service.AuthCredentialAdapter; +import site.icebang.global.config.security.endpoints.SecurityEndpoints; + +@Configuration +@RequiredArgsConstructor +public class SecurityConfig { + private final Environment environment; + private final AuthCredentialAdapter userDetailsService; + + @Bean + public AuthenticationProvider authenticationProvider() { + DaoAuthenticationProvider provider = new DaoAuthenticationProvider(); + provider.setUserDetailsService(userDetailsService); + provider.setPasswordEncoder(bCryptPasswordEncoder()); + return provider; + } + + @Bean + public AuthenticationManager authenticationManager(AuthenticationConfiguration config) + throws Exception { + return config.getAuthenticationManager(); + } + + @Bean + public SecureRandom secureRandom() { + return new SecureRandom(); + } + + @Bean + public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { + return http.authorizeHttpRequests( + auth -> + auth.requestMatchers(SecurityEndpoints.PUBLIC.getMatchers()) + .permitAll() + .requestMatchers("/auth/login", "/auth/logout") + .permitAll() + .requestMatchers("/v0/auth/check-session") + .authenticated() + .requestMatchers(SecurityEndpoints.DATA_ADMIN.getMatchers()) + .hasRole("SUPER_ADMIN") // hasAuthority -> hasRole + .requestMatchers(SecurityEndpoints.DATA_ENGINEER.getMatchers()) + .hasAnyRole( + "SUPER_ADMIN", + "SYSTEM_ADMIN", + "AI_ENGINEER", + "DATA_SCIENTIST", + "CRAWLING_ENGINEER", + "TECH_LEAD", + "DEVOPS") + .requestMatchers(SecurityEndpoints.ANALYST.getMatchers()) + .hasAnyRole( + "SUPER_ADMIN", + "SYSTEM_ADMIN", + "ORG_ADMIN", + "DATA_SCIENTIST", + "MARKETING_ANALYST", + "QA_ENGINEER", + "PROJECT_MANAGER", + 
"PRODUCT_OWNER", + "USER") + .requestMatchers(SecurityEndpoints.OPS.getMatchers()) + .hasAnyRole( + "SUPER_ADMIN", + "SYSTEM_ADMIN", + "WORKFLOW_ADMIN", + "OPERATIONS_MANAGER", + "DEVOPS", + "TECH_LEAD") + .requestMatchers(SecurityEndpoints.USER.getMatchers()) + .hasAnyRole("SUPER_ADMIN", "SYSTEM_ADMIN", "ORG_ADMIN", "USER") + .anyRequest() + .authenticated()) + .formLogin(AbstractHttpConfigurer::disable) + .logout( + logout -> logout.logoutUrl("/auth/logout").logoutSuccessUrl("/auth/login").permitAll()) + .csrf(AbstractHttpConfigurer::disable) + .build(); + } + + @Bean + public PasswordEncoder bCryptPasswordEncoder() { + String[] activeProfiles = environment.getActiveProfiles(); + + for (String profile : activeProfiles) { + if ("develop".equals(profile) || profile.contains("test")) { + return NoOpPasswordEncoder.getInstance(); + } + } + return new BCryptPasswordEncoder(); + } + + @Bean + public CorsFilter corsFilter() { + CorsConfiguration config = new CorsConfiguration(); + config.addAllowedOrigin("http://localhost:3000"); // ํ”„๋ก ํŠธ ์ฃผ์†Œ + config.addAllowedOrigin("https://admin.icebang.site"); // ํ”„๋ก ํŠธ ์ฃผ์†Œ + config.addAllowedHeader("*"); + config.addAllowedMethod("*"); + config.setAllowCredentials(true); // ์„ธ์…˜ ์ฟ ํ‚ค ํ—ˆ์šฉ + + UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); + source.registerCorsConfiguration("/**", config); + + return new CorsFilter(source); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java similarity index 79% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java rename to apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java index c73f462d..019337dc 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java @@ -1,16 +1,17 @@ -package com.gltkorea.icebang.config.security.endpoints; +package site.icebang.global.config.security.endpoints; public enum SecurityEndpoints { PUBLIC( "/", - "/login", - "/register", + "/ping", + "/v0/auth/login", "/api/public/**", "/health", "/css/**", "/js/**", "/images/**", - "/v0/**"), + "/v0/organizations/**", + "/v0/auth/register"), // ๋ฐ์ดํ„ฐ ๊ด€๋ฆฌ ๊ด€๋ จ ์—”๋“œํฌ์ธํŠธ DATA_ADMIN("/admin/**", "/api/admin/**", "/management/**", "/actuator/**"), @@ -25,7 +26,7 @@ public enum SecurityEndpoints { OPS("/api/scheduler/**", "/api/monitoring/**"), // ์ผ๋ฐ˜ ์‚ฌ์šฉ์ž ์—”๋“œํฌ์ธํŠธ - USER("/user/**", "/profile/**"); + USER("/user/**", "/profile/**", "/v0/auth/check-session"); private final String[] patterns; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/filter/LoggingFilter.java b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/filter/LoggingFilter.java rename to apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java index e8dda321..e89f2d80 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/filter/LoggingFilter.java +++ b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.filter; +package 
site.icebang.global.filter; import java.io.IOException; import java.util.UUID; diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index 773a7333..e7bc3f09 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -26,7 +26,9 @@ spring: sql: init: mode: always - schema-locations: classpath:sql/schema.sql + schema-locations: + - classpath:sql/00-drop-maria.sql + - classpath:sql/01-schema.sql data-locations: - classpath:sql/00-truncate.sql - classpath:sql/01-insert-internal-users.sql @@ -35,7 +37,7 @@ spring: mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application-production.yml b/apps/user-service/src/main/resources/application-production.yml index e69de29b..6b048fbd 100644 --- a/apps/user-service/src/main/resources/application-production.yml +++ b/apps/user-service/src/main/resources/application-production.yml @@ -0,0 +1,27 @@ +spring: + config: + activate: + on-profile: production + + datasource: + url: jdbc:mariadb://${DB_HOST}:${DB_PORT}/${DB_NAME} + username: ${DB_USER} + password: ${DB_PASS} + driver-class-name: org.mariadb.jdbc.Driver + + hikari: + connection-timeout: 30000 + idle-timeout: 600000 + max-lifetime: 1800000 + maximum-pool-size: 10 + minimum-idle: 5 + pool-name: HikariCP-MyBatis + +mybatis: + mapper-locations: classpath:mybatis/mapper/**/*.xml + type-aliases-package: site.icebang.dto + configuration: + map-underscore-to-camel-case: true + +logging: + config: classpath:log4j2-production.yml diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml b/apps/user-service/src/main/resources/application-test-e2e.yml index 7703f4a3..f7dceba9 100644 --- a/apps/user-service/src/main/resources/application-test-e2e.yml +++ b/apps/user-service/src/main/resources/application-test-e2e.yml @@ -6,12 +6,14 @@ spring: sql: init: mode: always - schema-locations: classpath:sql/schema.sql + schema-locations: + - classpath:sql/00-drop-maria.sql + - classpath:sql/01-schema.sql encoding: UTF-8 mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml new file mode 100644 index 00000000..0ed34f36 --- /dev/null +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -0,0 +1,42 @@ +spring: + config: + activate: + on-profile: test-integration + + # H2 ์ธ๋ฉ”๋ชจ๋ฆฌ ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์„ค์ • (Unit Test์šฉ) + datasource: + url: jdbc:h2:mem:testdb;MODE=MariaDB;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=TRUE + username: sa + password: + driver-class-name: org.h2.Driver + hikari: + connection-init-sql: "SET MODE MariaDB; SET NON_KEYWORDS USER;" + connection-timeout: 30000 + idle-timeout: 600000 + max-lifetime: 1800000 + maximum-pool-size: 10 + minimum-idle: 5 + pool-name: HikariCP-MyBatis + + # H2 ์›น ์ฝ˜์†” ํ™œ์„ฑํ™” (๋””๋ฒ„๊น…์šฉ) + h2: + console: + enabled: true + + # SQL ์Šคํฌ๋ฆฝํŠธ ์ดˆ๊ธฐํ™” ์„ค์ • + sql: + init: + mode: always + schema-locations: + - classpath:sql/00-drop-h2.sql + - 
classpath:sql/01-schema.sql + encoding: UTF-8 + +mybatis: + mapper-locations: classpath:mybatis/mapper/**/*.xml + type-aliases-package: site.icebang.dto + configuration: + map-underscore-to-camel-case: true + +logging: + config: classpath:log4j2-develop.yml \ No newline at end of file diff --git a/apps/user-service/src/main/resources/application-test-unit.yml b/apps/user-service/src/main/resources/application-test-unit.yml index fec65f43..d9a8059b 100644 --- a/apps/user-service/src/main/resources/application-test-unit.yml +++ b/apps/user-service/src/main/resources/application-test-unit.yml @@ -24,25 +24,18 @@ spring: console: enabled: true - # JPA ์„ค์ • (H2์šฉ) - jpa: - hibernate: - ddl-auto: create-drop - show-sql: true - properties: - hibernate: - dialect: org.hibernate.dialect.H2Dialect - # SQL ์Šคํฌ๋ฆฝํŠธ ์ดˆ๊ธฐํ™” ์„ค์ • sql: init: mode: always - schema-locations: classpath:sql/schema.sql + schema-locations: + - classpath:sql/00-drop-h2.sql + - classpath:sql/01-schema.sql encoding: UTF-8 mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index 278dfb11..d0357684 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -3,6 +3,11 @@ spring: name: mvp profiles: active: develop + test: + context: + cache: + maxSize: 1 mybatis: # Mapper XML ํŒŒ์ผ ์œ„์น˜ - mapper-locations: classpath:mapper/**/*.xml \ No newline at end of file + mapper-locations: classpath:mapper/**/*.xml + type-handlers-package: site.icebang.config.mybatis.typehandler \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index d1afc02b..f900c3b1 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -89,7 +89,7 @@ Configuration: - ref: file-error-appender # 2. ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ๋กœ๊ทธ - - name: com.gltkorea.icebang + - name: site.icebang additivity: "false" level: TRACE AppenderRef: @@ -119,6 +119,13 @@ Configuration: # 6. ํŠธ๋žœ์žญ์…˜ ๋กœ๊ทธ - DB ์ž‘์—… ์ถ”์  - name: org.springframework.transaction + level: DEBUG + additivity: "false" + AppenderRef: + - ref: console-appender + - ref: file-info-appender + + - name: site.icebang.domain.auth.mapper level: DEBUG additivity: "false" AppenderRef: diff --git a/apps/user-service/src/main/resources/log4j2-production.yml b/apps/user-service/src/main/resources/log4j2-production.yml index d1afc02b..31393458 100644 --- a/apps/user-service/src/main/resources/log4j2-production.yml +++ b/apps/user-service/src/main/resources/log4j2-production.yml @@ -89,7 +89,7 @@ Configuration: - ref: file-error-appender # 2. ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ๋กœ๊ทธ - - name: com.gltkorea.icebang + - name: site.icebang additivity: "false" level: TRACE AppenderRef: diff --git a/apps/user-service/src/main/resources/log4j2-test-unit.yml b/apps/user-service/src/main/resources/log4j2-test-unit.yml index 80df15cd..ef740431 100644 --- a/apps/user-service/src/main/resources/log4j2-test-unit.yml +++ b/apps/user-service/src/main/resources/log4j2-test-unit.yml @@ -38,7 +38,7 @@ Configuration: - ref: console-appender # 2. 
์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ๋กœ๊ทธ - - name: com.gltkorea.icebang + - name: site.icebang additivity: "false" level: INFO AppenderRef: diff --git a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml index 0c36cc21..d98c7299 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml @@ -2,36 +2,50 @@ - + - - - - INSERT INTO users (name, email, password) + + + + + INSERT INTO user (name, email, password) VALUES (#{name}, #{email}, #{password}); - - - INSERT INTO user_organizations (user_id, organization_id, department_id, position_id, status) + + INSERT INTO user_organization (user_id, organization_id, department_id, position_id, status) VALUES (#{id}, #{orgId}, #{deptId}, #{positionId}, #{status}); - - - INSERT INTO user_roles (user_organization_id, role_id) + + INSERT INTO user_role (user_organization_id, role_id) VALUES (#{userOrgId}, #{roleId}) - + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml index cdc403fb..40abe4d5 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml @@ -2,44 +2,44 @@ - + diff --git a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml index 4a40fe49..3cdcc90e 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml @@ -1,16 +1,16 @@ - + - SELECT id AS scheduleId, workflow_id AS workflowId, cron_expression AS cronExpression, is_active AS isActive - FROM - schedules - WHERE + FROM + schedule + WHERE is_active = #{isActive} diff --git a/apps/user-service/src/main/resources/sql/00-drop-h2.sql b/apps/user-service/src/main/resources/sql/00-drop-h2.sql new file mode 100644 index 00000000..d0c7bda3 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/00-drop-h2.sql @@ -0,0 +1,6 @@ +SET FOREIGN_KEY_CHECKS = 0; + +-- H2์—์„œ ๋ชจ๋“  ํ…Œ์ด๋ธ”๊ณผ ๊ฐ์ฒด๋ฅผ ์‚ญ์ œํ•˜๋Š” ์˜ฌ๋ฐ”๋ฅธ ๊ตฌ๋ฌธ +DROP ALL OBJECTS; + +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/00-drop-maria.sql b/apps/user-service/src/main/resources/sql/00-drop-maria.sql new file mode 100644 index 00000000..d93b57b4 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/00-drop-maria.sql @@ -0,0 +1,18 @@ +SET FOREIGN_KEY_CHECKS = 0; +SET @tables = NULL; + +-- 1. ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ๋‚ด ๋ชจ๋“  ํ…Œ์ด๋ธ” ๋ชฉ๋ก์„ ๊ฐ€์ ธ์™€ ๋ณ€์ˆ˜์— ์ €์žฅ +-- ๋ฐฑํ‹ฑ(`)์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…Œ์ด๋ธ” ์ด๋ฆ„์— ๊ณต๋ฐฑ์ด๋‚˜ ํŠน์ˆ˜ ๋ฌธ์ž๊ฐ€ ์žˆ์–ด๋„ ์•ˆ์ „ํ•˜๊ฒŒ ์ฒ˜๋ฆฌํ•ฉ๋‹ˆ๋‹ค. +SELECT GROUP_CONCAT(CONCAT('`', table_name, '`')) INTO @tables +FROM information_schema.tables +WHERE table_schema = DATABASE(); + +-- 2. ๋ณ€์ˆ˜ ๊ฐ’์ด NULL์ธ ๊ฒฝ์šฐ๋ฅผ ๋Œ€๋น„ํ•˜์—ฌ ์กฐ๊ฑด๋ฌธ ์ถ”๊ฐ€ ๋ฐ DROP TABLE ๊ตฌ๋ฌธ ์ƒ์„ฑ +SET @drop_tables_sql = IFNULL(CONCAT('DROP TABLE ', @tables), 'SELECT "No tables to drop";'); + +-- 3. 
๋™์  SQL ์‹คํ–‰ +PREPARE stmt FROM @drop_tables_sql; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/00-truncate.sql b/apps/user-service/src/main/resources/sql/00-truncate.sql index 93cbfd4a..497b6a4e 100644 --- a/apps/user-service/src/main/resources/sql/00-truncate.sql +++ b/apps/user-service/src/main/resources/sql/00-truncate.sql @@ -1,15 +1,12 @@ --- ๋ฐ์ดํ„ฐ ์ดˆ๊ธฐํ™” ์ „์— ์ถ”๊ฐ€ -SET FOREIGN_KEY_CHECKS = 0; +-- ๋ฐ์ดํ„ฐ ์ดˆ๊ธฐํ™” ์Šคํฌ๋ฆฝํŠธ (์™ธ๋ž˜ ํ‚ค ์ œ์•ฝ์กฐ๊ฑด์ด ์—†๋Š” ์Šคํ‚ค๋งˆ์šฉ) --- ์—ญ์ˆœ์œผ๋กœ TRUNCATE (์ฐธ์กฐ๋˜๋Š” ํ…Œ์ด๋ธ”์„ ๋‚˜์ค‘์—) -TRUNCATE TABLE user_roles; -TRUNCATE TABLE role_permissions; -TRUNCATE TABLE user_organizations; -TRUNCATE TABLE users; -TRUNCATE TABLE positions; -TRUNCATE TABLE departments; -TRUNCATE TABLE roles; -TRUNCATE TABLE permissions; -TRUNCATE TABLE organizations; - -SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file +-- ์‚ฌ์šฉ์ž ๋ฐ ์กฐ์ง ๊ด€๋ จ ํ…Œ์ด๋ธ” +TRUNCATE TABLE `user_role`; +TRUNCATE TABLE `role_permission`; +TRUNCATE TABLE `user_organization`; +TRUNCATE TABLE `user`; +TRUNCATE TABLE `position`; +TRUNCATE TABLE `department`; +TRUNCATE TABLE `role`; +TRUNCATE TABLE `permission`; +TRUNCATE TABLE `organization`; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql b/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql index 29f1f81a..1a69076e 100644 --- a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql +++ b/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql @@ -1,29 +1,31 @@ -- icebang ๋‚ด๋ถ€ ์ง์› ์ „์ฒด INSERT -- 1. icebang ์กฐ์ง -INSERT INTO `organizations` (`name`, `domain_name`) VALUES +INSERT INTO `organization` (`name`, `domain_name`) VALUES ('icebang', 'icebang.site'); -- 2. icebang ๋ถ€์„œ๋“ค -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'AI๊ฐœ๋ฐœํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '๋ฐ์ดํ„ฐํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '์ฝ˜ํ…์ธ ํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '๋งˆ์ผ€ํŒ…ํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '์šด์˜ํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '๊ธฐํšํŒ€'); +SET @org_id = (SELECT id FROM organization WHERE domain_name = 'icebang.site' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@org_id, 'AI๊ฐœ๋ฐœํŒ€'), + (@org_id, '๋ฐ์ดํ„ฐํŒ€'), + (@org_id, '์ฝ˜ํ…์ธ ํŒ€'), + (@org_id, '๋งˆ์ผ€ํŒ…ํŒ€'), + (@org_id, '์šด์˜ํŒ€'), + (@org_id, '๊ธฐํšํŒ€'); -- 3. 
icebang ์ง์ฑ…๋“ค -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CEO'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CTO'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'ํŒ€์žฅ'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '์‹œ๋‹ˆ์–ด'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '์ฃผ๋‹ˆ์–ด'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '์ธํ„ด'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@org_id, 'CEO'), + (@org_id, 'CTO'), + (@org_id, 'ํŒ€์žฅ'), + (@org_id, '์‹œ๋‹ˆ์–ด'), + (@org_id, '์ฃผ๋‹ˆ์–ด'), + (@org_id, '์ธํ„ด'); -- 4. ๋ฐ”์ด๋Ÿด ์ฝ˜ํ…์ธ  ์›Œํฌํ”Œ๋กœ์šฐ ๊ถŒํ•œ๋“ค -INSERT INTO `permissions` (`resource`, `description`) VALUES +INSERT INTO `permission` (`resource`, `description`) VALUES -- ์‚ฌ์šฉ์ž ๊ด€๋ฆฌ ('users.create', '์‚ฌ์šฉ์ž ์ƒ์„ฑ'), ('users.read', '์‚ฌ์šฉ์ž ์กฐํšŒ'), @@ -117,145 +119,93 @@ INSERT INTO `permissions` (`resource`, `description`) VALUES ('system.backup.restore', '์‹œ์Šคํ…œ ๋ฐฑ์—… ๋ณต์›'); -- 5. ์‹œ์Šคํ…œ ๊ณตํ†ต ์—ญํ•  -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - (NULL, 'SUPER_ADMIN', '์ตœ๊ณ  ๊ด€๋ฆฌ์ž - ๋ชจ๋“  ๊ถŒํ•œ'), - (NULL, 'SYSTEM_ADMIN', '์‹œ์Šคํ…œ ๊ด€๋ฆฌ์ž - ์‹œ์Šคํ…œ ์„ค์ • ๋ฐ ๊ด€๋ฆฌ'), - (NULL, 'ORG_ADMIN', '์กฐ์ง ๊ด€๋ฆฌ์ž - ์กฐ์ง ๋‚ด ๋ชจ๋“  ๊ถŒํ•œ'), - (NULL, 'USER', '์ผ๋ฐ˜ ์‚ฌ์šฉ์ž - ๊ธฐ๋ณธ ์‚ฌ์šฉ ๊ถŒํ•œ'), - (NULL, 'GUEST', '๊ฒŒ์ŠคํŠธ - ์ œํ•œ๋œ ์กฐํšŒ ๊ถŒํ•œ'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (NULL, 'SUPER_ADMIN', '์ตœ๊ณ  ๊ด€๋ฆฌ์ž - ๋ชจ๋“  ๊ถŒํ•œ'), + (NULL, 'SYSTEM_ADMIN', '์‹œ์Šคํ…œ ๊ด€๋ฆฌ์ž - ์‹œ์Šคํ…œ ์„ค์ • ๋ฐ ๊ด€๋ฆฌ'), + (NULL, 'ORG_ADMIN', '์กฐ์ง ๊ด€๋ฆฌ์ž - ์กฐ์ง ๋‚ด ๋ชจ๋“  ๊ถŒํ•œ'), + (NULL, 'USER', '์ผ๋ฐ˜ ์‚ฌ์šฉ์ž - ๊ธฐ๋ณธ ์‚ฌ์šฉ ๊ถŒํ•œ'), + (NULL, 'GUEST', '๊ฒŒ์ŠคํŠธ - ์ œํ•œ๋œ ์กฐํšŒ ๊ถŒํ•œ'); -- 6. 
icebang ์ „์šฉ ์—ญํ•  -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'AI_ENGINEER', 'AI ์—”์ง€๋‹ˆ์–ด - AI ๋ชจ๋ธ ๊ฐœ๋ฐœ ๋ฐ ์ตœ์ ํ™”'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'DATA_SCIENTIST', '๋ฐ์ดํ„ฐ ์‚ฌ์ด์–ธํ‹ฐ์ŠคํŠธ - ๋ฐ์ดํ„ฐ ๋ถ„์„ ๋ฐ ์ธ์‚ฌ์ดํŠธ ๋„์ถœ'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CRAWLING_ENGINEER', 'ํฌ๋กค๋ง ์—”์ง€๋‹ˆ์–ด - ์›น ํฌ๋กค๋ง ์‹œ์Šคํ…œ ๊ฐœ๋ฐœ'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CONTENT_CREATOR', '์ฝ˜ํ…์ธ  ํฌ๋ฆฌ์—์ดํ„ฐ - ๋ฐ”์ด๋Ÿด ์ฝ˜ํ…์ธ  ์ œ์ž‘'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CONTENT_MANAGER', '์ฝ˜ํ…์ธ  ๋งค๋‹ˆ์ € - ์ฝ˜ํ…์ธ  ๊ธฐํš ๋ฐ ๊ด€๋ฆฌ'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'WORKFLOW_ADMIN', '์›Œํฌํ”Œ๋กœ์šฐ ๊ด€๋ฆฌ์ž - ์ž๋™ํ™” ํ”„๋กœ์„ธ์Šค ๊ด€๋ฆฌ'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'MARKETING_ANALYST', '๋งˆ์ผ€ํŒ… ๋ถ„์„๊ฐ€ - ๋งˆ์ผ€ํŒ… ์„ฑ๊ณผ ๋ถ„์„'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'OPERATIONS_MANAGER', '์šด์˜ ๋งค๋‹ˆ์ € - ์‹œ์Šคํ…œ ์šด์˜ ๋ฐ ๋ชจ๋‹ˆํ„ฐ๋ง'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@org_id, 'AI_ENGINEER', 'AI ์—”์ง€๋‹ˆ์–ด - AI ๋ชจ๋ธ ๊ฐœ๋ฐœ ๋ฐ ์ตœ์ ํ™”'), + (@org_id, 'DATA_SCIENTIST', '๋ฐ์ดํ„ฐ ์‚ฌ์ด์–ธํ‹ฐ์ŠคํŠธ - ๋ฐ์ดํ„ฐ ๋ถ„์„ ๋ฐ ์ธ์‚ฌ์ดํŠธ ๋„์ถœ'), + (@org_id, 'CRAWLING_ENGINEER', 'ํฌ๋กค๋ง ์—”์ง€๋‹ˆ์–ด - ์›น ํฌ๋กค๋ง ์‹œ์Šคํ…œ ๊ฐœ๋ฐœ'), + (@org_id, 'CONTENT_CREATOR', '์ฝ˜ํ…์ธ  ํฌ๋ฆฌ์—์ดํ„ฐ - ๋ฐ”์ด๋Ÿด ์ฝ˜ํ…์ธ  ์ œ์ž‘'), + (@org_id, 'CONTENT_MANAGER', '์ฝ˜ํ…์ธ  ๋งค๋‹ˆ์ € - ์ฝ˜ํ…์ธ  ๊ธฐํš ๋ฐ ๊ด€๋ฆฌ'), + (@org_id, 'WORKFLOW_ADMIN', '์›Œํฌํ”Œ๋กœ์šฐ ๊ด€๋ฆฌ์ž - ์ž๋™ํ™” ํ”„๋กœ์„ธ์Šค ๊ด€๋ฆฌ'), + (@org_id, 'MARKETING_ANALYST', '๋งˆ์ผ€ํŒ… ๋ถ„์„๊ฐ€ - ๋งˆ์ผ€ํŒ… ์„ฑ๊ณผ ๋ถ„์„'), + (@org_id, 'OPERATIONS_MANAGER', '์šด์˜ ๋งค๋‹ˆ์ € - ์‹œ์Šคํ…œ ์šด์˜ ๋ฐ ๋ชจ๋‹ˆํ„ฐ๋ง'); -- 7. 
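-- [Editor's note, illustration only — not part of this patch] Roles are either global
-- (organization_id IS NULL, step 5) or organization-scoped (step 6), so the set of roles
-- assignable inside icebang can be resolved with:
SELECT name, description
FROM role
WHERE organization_id = @org_id OR organization_id IS NULL;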
icebang ์ง์›๋“ค -INSERT INTO `users` (`name`, `email`, `password`, `status`) VALUES - ('๊น€์•„์ด์Šค', 'ice.kim@icebang.site', '$2a$10$encrypted_password_hash1', 'ACTIVE'), - ('๋ฐ•๋ฐฉ๋ฐฉ', 'bang.park@icebang.site', '$2a$10$encrypted_password_hash2', 'ACTIVE'), - ('์ดํŠธ๋ Œ๋“œ', 'trend.lee@icebang.site', '$2a$10$encrypted_password_hash3', 'ACTIVE'), - ('์ •๋ฐ”์ด๋Ÿด', 'viral.jung@icebang.site', '$2a$10$encrypted_password_hash4', 'ACTIVE'), - ('์ตœ์ฝ˜ํ…์ธ ', 'content.choi@icebang.site', '$2a$10$encrypted_password_hash5', 'ACTIVE'), - ('ํ™ํฌ๋กค๋Ÿฌ', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), - ('์„œ๋ฐ์ดํ„ฐ', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), - ('์œค์›Œํฌํ”Œ๋กœ', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), - ('์‹œ์Šคํ…œ๊ด€๋ฆฌ์ž', 'admin@icebang.site', '$2a$10$encrypted_password_hash0', 'ACTIVE'); +INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES + ('๊น€์•„์ด์Šค', 'ice.kim@icebang.site', '$2a$10$encrypted_password_hash1', 'ACTIVE'), + ('๋ฐ•๋ฐฉ๋ฐฉ', 'bang.park@icebang.site', '$2a$10$encrypted_password_hash2', 'ACTIVE'), + ('์ดํŠธ๋ Œ๋“œ', 'trend.lee@icebang.site', '$2a$10$encrypted_password_hash3', 'ACTIVE'), + ('์ •๋ฐ”์ด๋Ÿด', 'viral.jung@icebang.site', '$2a$10$encrypted_password_hash4', 'ACTIVE'), + ('์ตœ์ฝ˜ํ…์ธ ', 'content.choi@icebang.site', '$2a$10$encrypted_password_hash5', 'ACTIVE'), + ('ํ™ํฌ๋กค๋Ÿฌ', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), + ('์„œ๋ฐ์ดํ„ฐ', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), + ('์œค์›Œํฌํ”Œ๋กœ', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), + ('์‹œ์Šคํ…œ๊ด€๋ฆฌ์ž', 'admin@icebang.site', 'qwer1234!A', 'ACTIVE'); -- 8. 
icebang ์ง์›-์กฐ์ง ์—ฐ๊ฒฐ -INSERT INTO `user_organizations` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES --- ๊น€์•„์ด์Šค - CEO, ๊ธฐํšํŒ€ -((SELECT id FROM users WHERE email = 'ice.kim@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'CEO' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '๊ธฐํšํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'PLN25001', 'ACTIVE'), - --- ๋ฐ•๋ฐฉ๋ฐฉ - CTO, AI๊ฐœ๋ฐœํŒ€ -((SELECT id FROM users WHERE email = 'bang.park@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'CTO' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = 'AI๊ฐœ๋ฐœํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'AI25001', 'ACTIVE'), - --- ์ดํŠธ๋ Œ๋“œ - ํŒ€์žฅ, ๋ฐ์ดํ„ฐํŒ€ -((SELECT id FROM users WHERE email = 'trend.lee@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'ํŒ€์žฅ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '๋ฐ์ดํ„ฐํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'DAT25001', 'ACTIVE'), - --- ์ •๋ฐ”์ด๋Ÿด - ํŒ€์žฅ, ์ฝ˜ํ…์ธ ํŒ€ -((SELECT id FROM users WHERE email = 'viral.jung@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'ํŒ€์žฅ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '์ฝ˜ํ…์ธ ํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'CON25001', 'ACTIVE'), - -((SELECT id FROM users WHERE email = 'content.choi@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '์‹œ๋‹ˆ์–ด' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '์ฝ˜ํ…์ธ ํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'CON25002', 'ACTIVE'), - --- ํ™ํฌ๋กค๋Ÿฌ - ์‹œ๋‹ˆ์–ด, AI๊ฐœ๋ฐœํŒ€ -((SELECT id FROM users WHERE email = 'crawler.hong@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '์‹œ๋‹ˆ์–ด' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = 'AI๊ฐœ๋ฐœํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'AI25002', 'ACTIVE'), - --- ์„œ๋ฐ์ดํ„ฐ - ์‹œ๋‹ˆ์–ด, ๋ฐ์ดํ„ฐํŒ€ -((SELECT id FROM users WHERE email = 'data.seo@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '์‹œ๋‹ˆ์–ด' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '๋ฐ์ดํ„ฐํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'DAT25002', 'ACTIVE'), - --- 
์œค์›Œํฌํ”Œ๋กœ - ํŒ€์žฅ, ์šด์˜ํŒ€ -((SELECT id FROM users WHERE email = 'workflow.yoon@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'ํŒ€์žฅ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '์šด์˜ํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'OPS25001', 'ACTIVE'), - --- ์‹œ์Šคํ…œ๊ด€๋ฆฌ์ž - CTO, ์šด์˜ํŒ€ -((SELECT id FROM users WHERE email = 'admin@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'CTO' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '์šด์˜ํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'OPS25000', 'ACTIVE'); +INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES + ((SELECT id FROM user WHERE email = 'ice.kim@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'CEO' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '๊ธฐํšํŒ€' AND organization_id = @org_id), 'PLN25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'bang.park@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'CTO' AND organization_id = @org_id), (SELECT id FROM department WHERE name = 'AI๊ฐœ๋ฐœํŒ€' AND organization_id = @org_id), 'AI25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'trend.lee@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'ํŒ€์žฅ' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '๋ฐ์ดํ„ฐํŒ€' AND organization_id = @org_id), 'DAT25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'viral.jung@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'ํŒ€์žฅ' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '์ฝ˜ํ…์ธ ํŒ€' AND organization_id = @org_id), 'CON25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'content.choi@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '์‹œ๋‹ˆ์–ด' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '์ฝ˜ํ…์ธ ํŒ€' AND organization_id = @org_id), 'CON25002', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'crawler.hong@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '์‹œ๋‹ˆ์–ด' AND organization_id = @org_id), (SELECT id FROM department WHERE name = 'AI๊ฐœ๋ฐœํŒ€' AND organization_id = @org_id), 'AI25002', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'data.seo@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '์‹œ๋‹ˆ์–ด' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '๋ฐ์ดํ„ฐํŒ€' AND organization_id = @org_id), 'DAT25002', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'workflow.yoon@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'ํŒ€์žฅ' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '์šด์˜ํŒ€' AND organization_id = @org_id), 'OPS25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'admin@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'CTO' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '์šด์˜ํŒ€' AND organization_id = @org_id), 'OPS25000', 'ACTIVE'); -- 9. 
์—ญํ• ๋ณ„ ๊ถŒํ•œ ํ• ๋‹น -- SUPER_ADMIN ๋ชจ๋“  ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'SUPER_ADMIN'), + (SELECT id FROM role WHERE name = 'SUPER_ADMIN'), id -FROM permissions; +FROM permission; -- ORG_ADMIN ์กฐ์ง ๋‚ด ๋ชจ๋“  ๊ถŒํ•œ (์‹œ์Šคํ…œ ๊ถŒํ•œ ์ œ์™ธ) -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'ORG_ADMIN'), + (SELECT id FROM role WHERE name = 'ORG_ADMIN'), id -FROM permissions +FROM permission WHERE resource NOT LIKE 'system.%'; -- AI_ENGINEER ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'AI_ENGINEER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'AI_ENGINEER' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'ai.%' OR resource LIKE 'crawling.%' OR resource LIKE 'workflows.%' OR resource IN ('content.read', 'trends.read', 'analytics.read'); -- DATA_SCIENTIST ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'DATA_SCIENTIST' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'DATA_SCIENTIST' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'trends.%' OR resource LIKE 'analytics.%' OR resource LIKE 'reports.%' OR resource IN ('content.read', 'campaigns.read', 'crawling.read'); -- CONTENT_MANAGER ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'CONTENT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'CONTENT_MANAGER' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'content.%' OR resource LIKE 'campaigns.%' OR resource LIKE 'trends.%' @@ -263,11 +213,11 @@ WHERE resource LIKE 'content.%' OR resource IN ('users.read.department'); -- WORKFLOW_ADMIN ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'WORKFLOW_ADMIN' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'WORKFLOW_ADMIN' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'workflows.%' OR resource LIKE 'ai.%' OR resource LIKE 'crawling.%' @@ -277,54 +227,54 @@ WHERE resource LIKE 'workflows.%' -- 10. 
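-- [Editor's note, illustration only — not part of this patch] Once the user_role rows seeded in the
-- next step exist, a user's effective permissions resolve through the chain
-- user -> user_organization -> user_role -> role_permission -> permission, for example:
SELECT DISTINCT p.resource
FROM user u
  JOIN user_organization uo ON uo.user_id = u.id
  JOIN user_role ur ON ur.user_organization_id = uo.id
  JOIN role_permission rp ON rp.role_id = ur.role_id
  JOIN permission p ON p.id = rp.permission_id
WHERE u.email = 'admin@icebang.site';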
icebang ์ง์›๋ณ„ ์—ญํ•  ํ• ๋‹น -- ๊น€์•„์ด์Šค(CEO) - ORG_ADMIN -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'ORG_ADMIN'), + (SELECT id FROM role WHERE name = 'ORG_ADMIN'), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'ice.kim@icebang.site'; -- ๋ฐ•๋ฐฉ๋ฐฉ(CTO) - AI_ENGINEER + WORKFLOW_ADMIN -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'AI_ENGINEER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'AI_ENGINEER' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'bang.park@icebang.site'; -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'WORKFLOW_ADMIN' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'WORKFLOW_ADMIN' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'bang.park@icebang.site'; -- ์ •๋ฐ”์ด๋Ÿด(์ฝ˜ํ…์ธ ํŒ€์žฅ) - CONTENT_MANAGER -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'CONTENT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'CONTENT_MANAGER' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'viral.jung@icebang.site'; -- ์ดํŠธ๋ Œ๋“œ(๋ฐ์ดํ„ฐํŒ€์žฅ) - DATA_SCIENTIST -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'DATA_SCIENTIST' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'DATA_SCIENTIST' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'trend.lee@icebang.site'; -- ์‹œ์Šคํ…œ๊ด€๋ฆฌ์ž - SUPER_ADMIN -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'SUPER_ADMIN'), + (SELECT id FROM role WHERE name = 'SUPER_ADMIN'), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'admin@icebang.site'; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql new file mode 100644 index 00000000..569c452a --- /dev/null +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -0,0 +1,287 @@ +-- MariaDB ์ตœ์ ํ™”๋œ ์Šคํ‚ค๋งˆ (๋‹จ์ˆ˜ํ˜• ํ…Œ์ด๋ธ” ๋„ค์ด๋ฐ, ์™ธ๋ž˜ 
ํ‚ค ์ œ์•ฝ์กฐ๊ฑด ์ œ๊ฑฐ ๋ฒ„์ „) +CREATE TABLE `permission` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `resource` varchar(100) NULL, + `description` varchar(255) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `is_active` boolean DEFAULT TRUE, + `updated_by` bigint unsigned NULL, + `created_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `organization` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(150) NULL, + `domain_name` varchar(100) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `role` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `organization_id` bigint unsigned NULL, + `name` varchar(100) NULL, + `description` varchar(500) NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `user` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(50) NULL, + `email` varchar(100) NULL, + `password` varchar(255) NULL, + `status` varchar(20) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `department` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `organization_id` bigint unsigned NOT NULL, + `name` varchar(100) NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `position` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `organization_id` bigint unsigned NOT NULL, + `title` varchar(100) NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `user_organization` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `user_id` bigint unsigned NOT NULL, + `organization_id` bigint unsigned NOT NULL, + `position_id` bigint unsigned NOT NULL, + `department_id` bigint unsigned NOT NULL, + `employee_number` varchar(50) NULL, + `status` varchar(20) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `role_permission` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `role_id` bigint unsigned NOT NULL, + `permission_id` int unsigned NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_role_permission` (`role_id`, `permission_id`) + ); + +CREATE TABLE `user_role` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `role_id` bigint unsigned NOT NULL, + `user_organization_id` bigint unsigned NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_user_role` (`role_id`, `user_organization_id`) + ); + +-- ์„ฑ๋Šฅ ์ตœ์ ํ™”๋ฅผ ์œ„ํ•œ ์ธ๋ฑ์Šค +CREATE INDEX `idx_user_email` ON `user` (`email`); +CREATE INDEX `idx_user_status` ON `user` (`status`); +CREATE INDEX `idx_user_organization_user` ON `user_organization` (`user_id`); +CREATE INDEX `idx_user_organization_org` ON `user_organization` (`organization_id`); +CREATE INDEX `idx_user_organization_status` ON `user_organization` (`status`); +CREATE INDEX `idx_role_org` ON `role` (`organization_id`); +CREATE INDEX `idx_permission_resource` ON `permission` (`resource`); +CREATE INDEX `idx_permission_active` ON `permission` (`is_active`); + + + +CREATE TABLE `workflow` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `description` text NULL, + `is_enabled` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + `updated_at` timestamp DEFAULT 
CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `schedule` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_id` bigint unsigned NOT NULL, + `cron_expression` varchar(50) NULL, + `parameters` json NULL, + `is_active` boolean DEFAULT TRUE, + `last_run_status` varchar(20) NULL, + `last_run_at` timestamp NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `job` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `description` text NULL, + `is_enabled` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `task` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `type` varchar(50) NULL, + `parameters` json NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `workflow_job` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_id` bigint unsigned NOT NULL, + `job_id` bigint unsigned NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_workflow_job` (`workflow_id`, `job_id`) + ); + +CREATE TABLE `job_task` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `job_id` bigint unsigned NOT NULL, + `task_id` bigint unsigned NOT NULL, + `execution_order` int NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_job_task` (`job_id`, `task_id`) + ); + +CREATE TABLE `execution_log` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `execution_type` varchar(20) NULL COMMENT 'task, schedule, job, workflow', + `source_id` bigint unsigned NULL COMMENT '๋ชจ๋“  ๋ฐ์ดํ„ฐ์— ๋Œ€ํ•œ ID ex: job_id, schedule_id, task_id, ...', + `log_level` varchar(20) NULL, + `executed_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `log_message` text NULL, + `trace_id` char(36) NULL, + `config_snapshot` json NULL, + PRIMARY KEY (`id`), + INDEX `idx_source_id_type` (`source_id`, `execution_type`) + ); + +CREATE TABLE `task_io_data` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `task_run_id` bigint unsigned NOT NULL, + `io_type` varchar(10) NOT NULL COMMENT 'INPUT, OUTPUT', + `name` varchar(100) NOT NULL COMMENT 'ํŒŒ๋ผ๋ฏธํ„ฐ/๋ณ€์ˆ˜ ์ด๋ฆ„', + `data_type` varchar(50) NOT NULL COMMENT 'string, number, json, file, etc', + `data_value` json NULL COMMENT '์‹ค์ œ ๋ฐ์ดํ„ฐ ๊ฐ’', + `data_size` bigint NULL COMMENT '๋ฐ์ดํ„ฐ ํฌ๊ธฐ (bytes)', + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_task_io_task_run_id` (`task_run_id`), + INDEX `idx_task_io_type` (`io_type`), + INDEX `idx_task_io_name` (`name`) + ); + +CREATE TABLE `config` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `target_type` varchar(50) NULL COMMENT 'user, job, workflow', + `target_id` bigint unsigned NULL, + `version` int NULL, + `json` json NULL, + `is_active` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_config_target` (`target_type`, `target_id`) + ); + +CREATE TABLE `category` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, 
+ `name` varchar(100) NULL, + `description` text NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `user_config` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `user_id` bigint unsigned NOT NULL, + `type` varchar(50) NULL, + `name` varchar(100) NULL, + `json` json NULL, + `is_active` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +-- ์ธ๋ฑ์Šค ์ถ”๊ฐ€ (์„ฑ๋Šฅ ์ตœ์ ํ™”) +CREATE INDEX `idx_schedule_workflow` ON `schedule` (`workflow_id`); +CREATE INDEX `idx_job_enabled` ON `job` (`is_enabled`); +CREATE INDEX `idx_task_type` ON `task` (`type`); +CREATE INDEX `idx_workflow_enabled` ON `workflow` (`is_enabled`); +CREATE UNIQUE INDEX `uk_schedule_workflow` ON `schedule` (`workflow_id`); +CREATE UNIQUE INDEX `uk_job_name` ON `job` (`name`); +CREATE UNIQUE INDEX `uk_task_name` ON `task` (`name`); +CREATE UNIQUE INDEX `uk_workflow_name` ON `workflow` (`name`); +CREATE INDEX `idx_user_config_user` ON `user_config` (`user_id`); + + + +-- ์›Œํฌํ”Œ๋กœ์šฐ ์‹คํ–‰ ํ…Œ์ด๋ธ” +CREATE TABLE `workflow_run` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_id` bigint unsigned NOT NULL, + `trace_id` char(36) NOT NULL, + `run_number` varchar(20) NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled', + `trigger_type` varchar(20) NULL COMMENT 'manual, schedule, push, pull_request', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `created_by` bigint unsigned NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_workflow_run_trace` (`trace_id`), + INDEX `idx_workflow_run_status` (`status`), + INDEX `idx_workflow_run_workflow_id` (`workflow_id`), + INDEX `idx_workflow_run_created_at` (`created_at`) + ); + +-- Job ์‹คํ–‰ ํ…Œ์ด๋ธ” +CREATE TABLE `job_run` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_run_id` bigint unsigned NOT NULL, + `job_id` bigint unsigned NOT NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `execution_order` int NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_job_run_workflow_run_id` (`workflow_run_id`), + INDEX `idx_job_run_status` (`status`), + INDEX `idx_job_run_job_id` (`job_id`) + ); + +-- Task ์‹คํ–‰ ํ…Œ์ด๋ธ” +CREATE TABLE `task_run` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `job_run_id` bigint unsigned NOT NULL, + `task_id` bigint unsigned NOT NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `execution_order` int NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_task_run_job_run_id` (`job_run_id`), + INDEX `idx_task_run_status` (`status`), + INDEX `idx_task_run_task_id` (`task_id`) + ); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/02-insert-external-users.sql b/apps/user-service/src/main/resources/sql/02-insert-external-users.sql index f4620bbd..b38f2c47 100644 --- a/apps/user-service/src/main/resources/sql/02-insert-external-users.sql +++ b/apps/user-service/src/main/resources/sql/02-insert-external-users.sql @@ -1,86 +1,92 @@ -- B2B 
ํ…Œ์ŠคํŠธ์šฉ ์™ธ๋ถ€ ํšŒ์‚ฌ INSERT -- 1. ์™ธ๋ถ€ ํ…Œ์ŠคํŠธ ํšŒ์‚ฌ๋“ค -INSERT INTO `organizations` (`name`, `domain_name`) VALUES - ('ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜', 'techinnovation.co.kr'), - ('๋””์ง€ํ„ธ์†”๋ฃจ์…˜', 'digitalsolution.com'), - ('ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค', 'creativeworks.net'); +INSERT INTO `organization` (`name`, `domain_name`) VALUES + ('ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜', 'techinnovation.co.kr'), + ('๋””์ง€ํ„ธ์†”๋ฃจ์…˜', 'digitalsolution.com'), + ('ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค', 'creativeworks.net'); -- 2. ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜ ๋ถ€์„œ๋“ค -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '๊ฐœ๋ฐœํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '๋””์ž์ธํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์ธ์‚ฌํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '๋งˆ์ผ€ํŒ…ํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์˜์—…ํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์žฌ๋ฌดํŒ€'); +SET @tech_org_id = (SELECT id FROM organization WHERE domain_name = 'techinnovation.co.kr' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@tech_org_id, '๊ฐœ๋ฐœํŒ€'), + (@tech_org_id, '๋””์ž์ธํŒ€'), + (@tech_org_id, '์ธ์‚ฌํŒ€'), + (@tech_org_id, '๋งˆ์ผ€ํŒ…ํŒ€'), + (@tech_org_id, '์˜์—…ํŒ€'), + (@tech_org_id, '์žฌ๋ฌดํŒ€'); -- 3. ๋””์ง€ํ„ธ์†”๋ฃจ์…˜ ๋ถ€์„œ๋“ค -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '๊ฐœ๋ฐœํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '๊ธฐํšํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '์šด์˜ํŒ€'); +SET @digital_org_id = (SELECT id FROM organization WHERE domain_name = 'digitalsolution.com' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@digital_org_id, '๊ฐœ๋ฐœํŒ€'), + (@digital_org_id, '๊ธฐํšํŒ€'), + (@digital_org_id, '์šด์˜ํŒ€'); -- 4. ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค ๋ถ€์„œ๋“ค -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '๋””์ž์ธํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '๋งˆ์ผ€ํŒ…ํŒ€'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '์ œ์ž‘ํŒ€'); +SET @creative_org_id = (SELECT id FROM organization WHERE domain_name = 'creativeworks.net' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@creative_org_id, '๋””์ž์ธํŒ€'), + (@creative_org_id, '๋งˆ์ผ€ํŒ…ํŒ€'), + (@creative_org_id, '์ œ์ž‘ํŒ€'); -- 5. 
ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜ ์ง์ฑ…๋“ค -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์‚ฌ์›'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์ฃผ์ž„'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '๋Œ€๋ฆฌ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '๊ณผ์žฅ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์ฐจ์žฅ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '๋ถ€์žฅ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '์ด์‚ฌ'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@tech_org_id, '์‚ฌ์›'), + (@tech_org_id, '์ฃผ์ž„'), + (@tech_org_id, '๋Œ€๋ฆฌ'), + (@tech_org_id, '๊ณผ์žฅ'), + (@tech_org_id, '์ฐจ์žฅ'), + (@tech_org_id, '๋ถ€์žฅ'), + (@tech_org_id, '์ด์‚ฌ'); -- 6. ๋””์ง€ํ„ธ์†”๋ฃจ์…˜ ์ง์ฑ…๋“ค -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '์‚ฌ์›'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '์„ ์ž„'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '์ฑ…์ž„'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '์ˆ˜์„'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'ํŒ€์žฅ'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '๋ณธ๋ถ€์žฅ'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@digital_org_id, '์‚ฌ์›'), + (@digital_org_id, '์„ ์ž„'), + (@digital_org_id, '์ฑ…์ž„'), + (@digital_org_id, '์ˆ˜์„'), + (@digital_org_id, 'ํŒ€์žฅ'), + (@digital_org_id, '๋ณธ๋ถ€์žฅ'); -- 7. ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค ์ง์ฑ…๋“ค -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '์ฃผ๋‹ˆ์–ด'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '์‹œ๋‹ˆ์–ด'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '๋ฆฌ๋“œ'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '๋””๋ ‰ํ„ฐ'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@creative_org_id, '์ฃผ๋‹ˆ์–ด'), + (@creative_org_id, '์‹œ๋‹ˆ์–ด'), + (@creative_org_id, '๋ฆฌ๋“œ'), + (@creative_org_id, '๋””๋ ‰ํ„ฐ'); -- 8. 
์™ธ๋ถ€ ํšŒ์‚ฌ๋ณ„ ์ปค์Šคํ…€ ์—ญํ•  -- ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜ ์—ญํ•  -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'DEPT_MANAGER', '๋ถ€์„œ ๊ด€๋ฆฌ์ž - ๋ถ€์„œ ๋‚ด ๊ด€๋ฆฌ ๊ถŒํ•œ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'TEAM_LEAD', 'ํŒ€์žฅ - ํŒ€์› ๊ด€๋ฆฌ ๋ฐ ํ”„๋กœ์ ํŠธ ๋ฆฌ๋“œ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'SENIOR_DEV', '์‹œ๋‹ˆ์–ด ๊ฐœ๋ฐœ์ž - ๊ฐœ๋ฐœ ๊ด€๋ จ ๊ณ ๊ธ‰ ๊ถŒํ•œ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'JUNIOR_DEV', '์ฃผ๋‹ˆ์–ด ๊ฐœ๋ฐœ์ž - ๊ฐœ๋ฐœ ๊ด€๋ จ ๊ธฐ๋ณธ ๊ถŒํ•œ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'PROJECT_MANAGER', 'ํ”„๋กœ์ ํŠธ ๋งค๋‹ˆ์ € - ํ”„๋กœ์ ํŠธ ๊ด€๋ฆฌ ๊ถŒํ•œ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'DESIGNER', '๋””์ž์ด๋„ˆ - ๋””์ž์ธ ๊ด€๋ จ ๊ถŒํ•œ'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'HR_SPECIALIST', '์ธ์‚ฌ ๋‹ด๋‹น์ž - ์ธ์‚ฌ ๊ด€๋ฆฌ ๊ถŒํ•œ'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@tech_org_id, 'DEPT_MANAGER', '๋ถ€์„œ ๊ด€๋ฆฌ์ž - ๋ถ€์„œ ๋‚ด ๊ด€๋ฆฌ ๊ถŒํ•œ'), + (@tech_org_id, 'TEAM_LEAD', 'ํŒ€์žฅ - ํŒ€์› ๊ด€๋ฆฌ ๋ฐ ํ”„๋กœ์ ํŠธ ๋ฆฌ๋“œ'), + (@tech_org_id, 'SENIOR_DEV', '์‹œ๋‹ˆ์–ด ๊ฐœ๋ฐœ์ž - ๊ฐœ๋ฐœ ๊ด€๋ จ ๊ณ ๊ธ‰ ๊ถŒํ•œ'), + (@tech_org_id, 'JUNIOR_DEV', '์ฃผ๋‹ˆ์–ด ๊ฐœ๋ฐœ์ž - ๊ฐœ๋ฐœ ๊ด€๋ จ ๊ธฐ๋ณธ ๊ถŒํ•œ'), + (@tech_org_id, 'PROJECT_MANAGER', 'ํ”„๋กœ์ ํŠธ ๋งค๋‹ˆ์ € - ํ”„๋กœ์ ํŠธ ๊ด€๋ฆฌ ๊ถŒํ•œ'), + (@tech_org_id, 'DESIGNER', '๋””์ž์ด๋„ˆ - ๋””์ž์ธ ๊ด€๋ จ ๊ถŒํ•œ'), + (@tech_org_id, 'HR_SPECIALIST', '์ธ์‚ฌ ๋‹ด๋‹น์ž - ์ธ์‚ฌ ๊ด€๋ฆฌ ๊ถŒํ•œ'); -- ๋””์ง€ํ„ธ์†”๋ฃจ์…˜ ์—ญํ•  -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'TECH_LEAD', '๊ธฐ์ˆ  ๋ฆฌ๋“œ - ๊ธฐ์ˆ  ๊ด€๋ จ ์ด๊ด„'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'PRODUCT_OWNER', 'ํ”„๋กœ๋•ํŠธ ์˜ค๋„ˆ - ์ œํ’ˆ ๊ธฐํš ๊ด€๋ฆฌ'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'QA_ENGINEER', 'QA ์—”์ง€๋‹ˆ์–ด - ํ’ˆ์งˆ ๋ณด์ฆ'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'DEVOPS', 'DevOps ์—”์ง€๋‹ˆ์–ด - ์ธํ”„๋ผ ๊ด€๋ฆฌ'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@digital_org_id, 'TECH_LEAD', '๊ธฐ์ˆ  ๋ฆฌ๋“œ - ๊ธฐ์ˆ  ๊ด€๋ จ ์ด๊ด„'), + (@digital_org_id, 'PRODUCT_OWNER', 'ํ”„๋กœ๋•ํŠธ ์˜ค๋„ˆ - ์ œํ’ˆ ๊ธฐํš ๊ด€๋ฆฌ'), + (@digital_org_id, 'QA_ENGINEER', 'QA ์—”์ง€๋‹ˆ์–ด - ํ’ˆ์งˆ ๋ณด์ฆ'), + (@digital_org_id, 'DEVOPS', 'DevOps ์—”์ง€๋‹ˆ์–ด - ์ธํ”„๋ผ ๊ด€๋ฆฌ'); -- ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค ์—ญํ•  -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'CREATIVE_DIRECTOR', 'ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ ๋””๋ ‰ํ„ฐ - ์ฐฝ์ž‘ ์ด๊ด„'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'ART_DIRECTOR', '์•„ํŠธ ๋””๋ ‰ํ„ฐ - ์˜ˆ์ˆ  ๊ฐ๋…'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'MOTION_DESIGNER', '๋ชจ์…˜ ๋””์ž์ด๋„ˆ - ์˜์ƒ/์• ๋‹ˆ๋ฉ”์ด์…˜'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'COPYWRITER', '์นดํ”ผ๋ผ์ดํ„ฐ - ์ฝ˜ํ…์ธ  ์ž‘์„ฑ'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@creative_org_id, 'CREATIVE_DIRECTOR', 
'ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ ๋””๋ ‰ํ„ฐ - ์ฐฝ์ž‘ ์ด๊ด„'), + (@creative_org_id, 'ART_DIRECTOR', '์•„ํŠธ ๋””๋ ‰ํ„ฐ - ์˜ˆ์ˆ  ๊ฐ๋…'), + (@creative_org_id, 'MOTION_DESIGNER', '๋ชจ์…˜ ๋””์ž์ด๋„ˆ - ์˜์ƒ/์• ๋‹ˆ๋ฉ”์ด์…˜'), + (@creative_org_id, 'COPYWRITER', '์นดํ”ผ๋ผ์ดํ„ฐ - ์ฝ˜ํ…์ธ  ์ž‘์„ฑ'); -- 9. ์™ธ๋ถ€ ํšŒ์‚ฌ ํ…Œ์ŠคํŠธ ์‚ฌ์šฉ์ž๋“ค -INSERT INTO `users` (`name`, `email`, `password`, `status`) VALUES +INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES -- ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜ ์ง์› ('๊น€์ฒ ์ˆ˜', 'chulsoo.kim@techinnovation.co.kr', '$2a$10$encrypted_password_hash11', 'ACTIVE'), ('์ด์˜ํฌ', 'younghee.lee@techinnovation.co.kr', '$2a$10$encrypted_password_hash12', 'ACTIVE'), @@ -94,119 +100,80 @@ INSERT INTO `users` (`name`, `email`, `password`, `status`) VALUES ('ํ™์ง€์•„', 'jia.hong@creativeworks.net', '$2a$10$encrypted_password_hash16', 'ACTIVE'); -- 10. ์™ธ๋ถ€ ํšŒ์‚ฌ ์‚ฌ์šฉ์ž-์กฐ์ง ์—ฐ๊ฒฐ -INSERT INTO `user_organizations` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES +INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES -- ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜ ์ง์›๋“ค --- ๊น€์ฒ ์ˆ˜ - ๊ฐœ๋ฐœํŒ€ ๊ณผ์žฅ -((SELECT id FROM users WHERE email = 'chulsoo.kim@techinnovation.co.kr'), - (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), - (SELECT id FROM positions WHERE title = '๊ณผ์žฅ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - (SELECT id FROM departments WHERE name = '๊ฐœ๋ฐœํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - 'DEV25001', 'ACTIVE'), - --- ์ด์˜ํฌ - ๋””์ž์ธํŒ€ ๋Œ€๋ฆฌ -((SELECT id FROM users WHERE email = 'younghee.lee@techinnovation.co.kr'), - (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), - (SELECT id FROM positions WHERE title = '๋Œ€๋ฆฌ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - (SELECT id FROM departments WHERE name = '๋””์ž์ธํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - 'DES25001', 'ACTIVE'), - --- ๋ฐ•๋ฏผ์ˆ˜ - ์ธ์‚ฌํŒ€ ์ฐจ์žฅ -((SELECT id FROM users WHERE email = 'minsu.park@techinnovation.co.kr'), - (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), - (SELECT id FROM positions WHERE title = '์ฐจ์žฅ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - (SELECT id FROM departments WHERE name = '์ธ์‚ฌํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - 'HR25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'chulsoo.kim@techinnovation.co.kr'), @tech_org_id, (SELECT id FROM position WHERE title = '๊ณผ์žฅ' AND organization_id = @tech_org_id), (SELECT id FROM department WHERE name = '๊ฐœ๋ฐœํŒ€' AND organization_id = @tech_org_id), 'DEV25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'younghee.lee@techinnovation.co.kr'), @tech_org_id, (SELECT id FROM position WHERE title = '๋Œ€๋ฆฌ' AND organization_id = @tech_org_id), (SELECT id FROM department WHERE name = '๋””์ž์ธํŒ€' AND organization_id = @tech_org_id), 'DES25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'minsu.park@techinnovation.co.kr'), @tech_org_id, (SELECT id FROM position WHERE title = '์ฐจ์žฅ' AND organization_id = @tech_org_id), (SELECT id FROM department WHERE name = 
'์ธ์‚ฌํŒ€' AND organization_id = @tech_org_id), 'HR25001', 'ACTIVE'), -- ๋””์ง€ํ„ธ์†”๋ฃจ์…˜ ์ง์›๋“ค --- ์ •์ˆ˜์—ฐ - ๊ฐœ๋ฐœํŒ€ ํŒ€์žฅ -((SELECT id FROM users WHERE email = 'sooyeon.jung@digitalsolution.com'), - (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), - (SELECT id FROM positions WHERE title = 'ํŒ€์žฅ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - (SELECT id FROM departments WHERE name = '๊ฐœ๋ฐœํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - 'DEV25001', 'ACTIVE'), - --- ์ตœํ˜„์šฐ - ๊ธฐํšํŒ€ ์ฑ…์ž„ -((SELECT id FROM users WHERE email = 'hyunwoo.choi@digitalsolution.com'), - (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), - (SELECT id FROM positions WHERE title = '์ฑ…์ž„' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - (SELECT id FROM departments WHERE name = '๊ธฐํšํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - 'PLN25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'sooyeon.jung@digitalsolution.com'), @digital_org_id, (SELECT id FROM position WHERE title = 'ํŒ€์žฅ' AND organization_id = @digital_org_id), (SELECT id FROM department WHERE name = '๊ฐœ๋ฐœํŒ€' AND organization_id = @digital_org_id), 'DEV25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'hyunwoo.choi@digitalsolution.com'), @digital_org_id, (SELECT id FROM position WHERE title = '์ฑ…์ž„' AND organization_id = @digital_org_id), (SELECT id FROM department WHERE name = '๊ธฐํšํŒ€' AND organization_id = @digital_org_id), 'PLN25001', 'ACTIVE'), -- ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค ์ง์› --- ํ™์ง€์•„ - ๋””์ž์ธํŒ€ ๋ฆฌ๋“œ -((SELECT id FROM users WHERE email = 'jia.hong@creativeworks.net'), - (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), - (SELECT id FROM positions WHERE title = '๋ฆฌ๋“œ' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), - (SELECT id FROM departments WHERE name = '๋””์ž์ธํŒ€' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), - 'DES25001', 'ACTIVE'); +((SELECT id FROM user WHERE email = 'jia.hong@creativeworks.net'), @creative_org_id, (SELECT id FROM position WHERE title = '๋ฆฌ๋“œ' AND organization_id = @creative_org_id), (SELECT id FROM department WHERE name = '๋””์ž์ธํŒ€' AND organization_id = @creative_org_id), 'DES25001', 'ACTIVE'); -- 11. 
์™ธ๋ถ€ ํšŒ์‚ฌ ์‚ฌ์šฉ์ž๋ณ„ ์—ญํ•  ํ• ๋‹น -- ํ…Œํฌ์ด๋…ธ๋ฒ ์ด์…˜ --- ๊น€์ฒ ์ˆ˜์—๊ฒŒ DEPT_MANAGER ์—ญํ•  -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'DEPT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'DEPT_MANAGER' AND organization_id = @tech_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'chulsoo.kim@techinnovation.co.kr'; --- ์ด์˜ํฌ์—๊ฒŒ DESIGNER ์—ญํ•  -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'DESIGNER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'DESIGNER' AND organization_id = @tech_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'younghee.lee@techinnovation.co.kr'; --- ๋ฐ•๋ฏผ์ˆ˜์—๊ฒŒ HR_SPECIALIST ์—ญํ•  -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'HR_SPECIALIST' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'HR_SPECIALIST' AND organization_id = @tech_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'minsu.park@techinnovation.co.kr'; -- ๋””์ง€ํ„ธ์†”๋ฃจ์…˜ --- ์ •์ˆ˜์—ฐ์—๊ฒŒ TECH_LEAD ์—ญํ•  -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'TECH_LEAD' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), + (SELECT id FROM role WHERE name = 'TECH_LEAD' AND organization_id = @digital_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'sooyeon.jung@digitalsolution.com'; --- ์ตœํ˜„์šฐ์—๊ฒŒ PRODUCT_OWNER ์—ญํ•  -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'PRODUCT_OWNER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), + (SELECT id FROM role WHERE name = 'PRODUCT_OWNER' AND organization_id = @digital_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'hyunwoo.choi@digitalsolution.com'; -- ํฌ๋ฆฌ์—์ดํ‹ฐ๋ธŒ์›์Šค --- ํ™์ง€์•„์—๊ฒŒ CREATIVE_DIRECTOR ์—ญํ•  -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), + (SELECT id FROM role WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = @creative_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = 
uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'jia.hong@creativeworks.net'; -- 12. ์™ธ๋ถ€ ํšŒ์‚ฌ ์—ญํ• ๋ณ„ ๊ธฐ๋ณธ ๊ถŒํ•œ ํ• ๋‹น (์ƒ˜ํ”Œ) -- DEPT_MANAGER ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'DEPT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'DEPT_MANAGER' AND organization_id = @tech_org_id), id -FROM permissions +FROM permission WHERE resource IN ( 'users.read.department', 'users.update', 'users.invite', 'departments.read', 'departments.manage', @@ -216,11 +183,11 @@ WHERE resource IN ( ); -- TECH_LEAD ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'TECH_LEAD' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), + (SELECT id FROM role WHERE name = 'TECH_LEAD' AND organization_id = @digital_org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'ai.%' OR resource LIKE 'workflows.%' OR resource IN ( @@ -230,11 +197,11 @@ WHERE resource LIKE 'ai.%' ); -- CREATIVE_DIRECTOR ๊ถŒํ•œ -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), + (SELECT id FROM role WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = @creative_org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'content.%' OR resource LIKE 'campaigns.%' OR resource IN ( diff --git a/apps/user-service/src/main/resources/sql/schema.sql b/apps/user-service/src/main/resources/sql/schema.sql deleted file mode 100644 index e2a9a917..00000000 --- a/apps/user-service/src/main/resources/sql/schema.sql +++ /dev/null @@ -1,256 +0,0 @@ --- MariaDB ์ตœ์ ํ™”๋œ ์Šคํ‚ค๋งˆ (์†Œ๋ฌธ์ž, VARCHAR ํฌ๊ธฐ ์ง€์ •) -CREATE TABLE IF NOT EXISTS `permissions` ( - `id` int unsigned NOT NULL AUTO_INCREMENT, - `resource` varchar(100) NULL, - `description` varchar(255) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `is_active` boolean DEFAULT TRUE, - `updated_by` bigint unsigned NULL, - `created_by` bigint unsigned NULL, - PRIMARY KEY (`id`) -); - -CREATE TABLE IF NOT EXISTS `organizations` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(150) NULL, - `domain_name` varchar(100) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) -); - -CREATE TABLE IF NOT EXISTS `roles` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `organization_id` bigint unsigned NULL, - `name` varchar(100) NULL, - `description` varchar(500) NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_organizations_to_roles` FOREIGN KEY (`organization_id`) - REFERENCES `organizations` (`id`) ON DELETE SET NULL -); - -CREATE TABLE IF NOT EXISTS `users` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(50) NULL, - `email` varchar(100) NULL, - `password` varchar(255) NULL, - `status` varchar(20) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT 
CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) -); - -CREATE TABLE IF NOT EXISTS `departments` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `organization_id` bigint unsigned NOT NULL, - `name` varchar(100) NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_organizations_to_departments` FOREIGN KEY (`organization_id`) REFERENCES `organizations` (`id`) -); - -CREATE TABLE IF NOT EXISTS `positions` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `organization_id` bigint unsigned NOT NULL, - `title` varchar(100) NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_organizations_to_positions` FOREIGN KEY (`organization_id`) REFERENCES `organizations` (`id`) -); - -CREATE TABLE IF NOT EXISTS `user_organizations` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `user_id` bigint unsigned NOT NULL, - `organization_id` bigint unsigned NOT NULL, - `position_id` bigint unsigned NOT NULL, - `department_id` bigint unsigned NOT NULL, - `employee_number` varchar(50) NULL, - `status` varchar(20) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - CONSTRAINT `fk_users_to_user_organizations` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`), - CONSTRAINT `fk_organizations_to_user_organizations` FOREIGN KEY (`organization_id`) REFERENCES `organizations` (`id`), - CONSTRAINT `fk_positions_to_user_organizations` FOREIGN KEY (`position_id`) REFERENCES `positions` (`id`), - CONSTRAINT `fk_departments_to_user_organizations` FOREIGN KEY (`department_id`) REFERENCES `departments` (`id`) -); - -CREATE TABLE IF NOT EXISTS `role_permissions` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `role_id` bigint unsigned NOT NULL, - `permission_id` int unsigned NOT NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_roles_to_role_permissions` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`), - CONSTRAINT `fk_permissions_to_role_permissions` FOREIGN KEY (`permission_id`) REFERENCES `permissions` (`id`), - UNIQUE KEY `uk_role_permission` (`role_id`, `permission_id`) -); - -CREATE TABLE IF NOT EXISTS `user_roles` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `role_id` bigint unsigned NOT NULL, - `user_organization_id` bigint unsigned NOT NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_roles_to_user_roles` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`), - CONSTRAINT `fk_user_organizations_to_user_roles` FOREIGN KEY (`user_organization_id`) REFERENCES `user_organizations` (`id`), - UNIQUE KEY `uk_user_role` (`role_id`, `user_organization_id`) -); - --- ์„ฑ๋Šฅ ์ตœ์ ํ™”๋ฅผ ์œ„ํ•œ ์ธ๋ฑ์Šค -CREATE INDEX IF NOT EXISTS - `idx_users_email` ON `users` (`email`); -CREATE INDEX IF NOT EXISTS - `idx_users_status` ON `users` (`status`); -CREATE INDEX IF NOT EXISTS - `idx_user_organizations_user` ON `user_organizations` (`user_id`); -CREATE INDEX IF NOT EXISTS - `idx_user_organizations_org` ON `user_organizations` (`organization_id`); -CREATE INDEX IF NOT EXISTS - `idx_user_organizations_status` ON `user_organizations` (`status`); -CREATE INDEX IF NOT EXISTS - `idx_roles_org` ON `roles` (`organization_id`); -CREATE INDEX IF NOT EXISTS - `idx_permissions_resource` ON `permissions` (`resource`); -CREATE INDEX IF NOT EXISTS - `idx_permissions_active` ON `permissions` (`is_active`); - - - -CREATE TABLE IF NOT EXISTS `workflows` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL UNIQUE, - `description` text NULL, - `is_enabled` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT 
CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `updated_by` bigint unsigned NULL, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `schedules` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `workflow_id` bigint unsigned NOT NULL, - `cron_expression` varchar(50) NULL, - `parameters` json NULL, - `is_active` boolean DEFAULT TRUE, - `last_run_status` varchar(20) NULL, - `last_run_at` timestamp NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `updated_by` bigint unsigned NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_schedules_to_workflows` FOREIGN KEY (`workflow_id`) REFERENCES `workflows` (`id`) - ); - -CREATE TABLE IF NOT EXISTS `jobs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL UNIQUE, - `description` text NULL, - `is_enabled` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `updated_by` bigint unsigned NULL, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `tasks` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL UNIQUE, - `type` varchar(50) NULL, - `parameters` json NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `workflow_jobs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `workflow_id` bigint unsigned NOT NULL, - `job_id` bigint unsigned NOT NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_workflow_jobs_to_workflows` FOREIGN KEY (`workflow_id`) REFERENCES `workflows` (`id`), - CONSTRAINT `fk_workflow_jobs_to_jobs` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`), - UNIQUE KEY `uk_workflow_job` (`workflow_id`, `job_id`) - ); - -CREATE TABLE IF NOT EXISTS `job_tasks` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `job_id` bigint unsigned NOT NULL, - `task_id` bigint unsigned NOT NULL, - `execution_order` int NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_job_tasks_to_jobs` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`), - CONSTRAINT `fk_job_tasks_to_tasks` FOREIGN KEY (`task_id`) REFERENCES `tasks` (`id`), - UNIQUE KEY `uk_job_task` (`job_id`, `task_id`) - ); - -CREATE TABLE IF NOT EXISTS `execution_logs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `execution_type` varchar(20) NULL COMMENT 'task, schedule, job, workflow', - `source_id` bigint unsigned NULL COMMENT '๋ชจ๋“  ๋ฐ์ดํ„ฐ์— ๋Œ€ํ•œ ID ex: job_id, schedule_id, task_id, ...', - `log_level` varchar(20) NULL, - `executed_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `log_message` text NULL, - `trace_id` char(36) NULL, - `config_snapshot` json NULL, - PRIMARY KEY (`id`), - INDEX `idx_source_id_type` (`source_id`, `execution_type`) - ); - -CREATE TABLE IF NOT EXISTS `task_io_data` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `trace_id` char(36) NULL, - `io_type` varchar(10) NULL COMMENT 'INPUT, OUTPUT', - `name` varchar(100) NULL, - `data_type` varchar(50) NULL, - `data_value` json NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - INDEX `idx_trace_id` (`trace_id`) - ); - -CREATE TABLE IF NOT EXISTS `configs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `target_type` varchar(50) NULL COMMENT 
'user, job, workflow', - `target_id` bigint unsigned NULL, - `version` int NULL, - `json` json NULL, - `is_active` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `uk_config_target` (`target_type`, `target_id`) - ); - -CREATE TABLE IF NOT EXISTS `categories` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NULL, - `description` text NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `user_configs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `user_id` bigint unsigned NOT NULL, - `type` varchar(50) NULL, - `name` varchar(100) NULL, - `json` json NULL, - `is_active` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) - ); - --- ์ธ๋ฑ์Šค ์ถ”๊ฐ€ (์„ฑ๋Šฅ ์ตœ์ ํ™”) -CREATE INDEX IF NOT EXISTS `idx_schedules_workflow` ON `schedules` (`workflow_id`); -CREATE INDEX IF NOT EXISTS `idx_jobs_enabled` ON `jobs` (`is_enabled`); -CREATE INDEX IF NOT EXISTS `idx_tasks_type` ON `tasks` (`type`); -CREATE INDEX IF NOT EXISTS `idx_workflows_enabled` ON `workflows` (`is_enabled`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_schedules_workflow` ON `schedules` (`workflow_id`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_job_name` ON `jobs` (`name`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_task_name` ON `tasks` (`name`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_workflows_name` ON `workflows` (`name`); -CREATE INDEX IF NOT EXISTS `idx_user_configs_user` ON `user_configs` (`user_id`); \ No newline at end of file diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java deleted file mode 100644 index e744873b..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java +++ /dev/null @@ -1,81 +0,0 @@ -// package com.gltkorea.icebang; -// -// import static org.assertj.core.api.Assertions.assertThat; -// -// import java.sql.Connection; -// import java.sql.SQLException; -// import java.util.Optional; -// -// import javax.sql.DataSource; -// -// import org.junit.jupiter.api.DisplayName; -// import org.junit.jupiter.api.Test; -// import org.springframework.beans.factory.annotation.Autowired; -// import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; -// import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase.Replace; -// import org.springframework.boot.test.context.SpringBootTest; -// import org.springframework.context.annotation.Import; -// import org.springframework.test.context.ActiveProfiles; -// import org.springframework.test.context.jdbc.Sql; -// import org.springframework.transaction.annotation.Transactional; -// -// import com.gltkorea.icebang.dto.UserDto; -// import com.gltkorea.icebang.mapper.UserMapper; -// -// @SpringBootTest -// @Import(TestcontainersConfiguration.class) -// @AutoConfigureTestDatabase(replace = Replace.NONE) -// @ActiveProfiles("test") // application-test-unit.yml ์„ค์ •์„ ํ™œ์„ฑํ™” -// @Transactional // ํ…Œ์ŠคํŠธ ํ›„ ๋ฐ์ดํ„ฐ ๋กค๋ฐฑ -// @Sql( -// scripts = {"classpath:sql/create-schema.sql", "classpath:sql/insert-user-data.sql"}, -// executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) -// class 
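A note on the seed-data hunks earlier in this diff: the rewritten INSERT ... SELECT statements reference session variables (@tech_org_id, @digital_org_id, @creative_org_id) instead of repeating the old domain_name subqueries, but the statements that set those variables sit outside the hunks shown here. A minimal sketch of how they could be initialized near the top of the seed script, assuming the renamed organization table still carries the domain_name column:

    -- Hypothetical initialization; the real seed script defines these outside the visible hunks.
    SET @tech_org_id     = (SELECT id FROM organization WHERE domain_name = 'techinnovation.co.kr');
    SET @digital_org_id  = (SELECT id FROM organization WHERE domain_name = 'digitalsolution.com');
    SET @creative_org_id = (SELECT id FROM organization WHERE domain_name = 'creativeworks.net');

Resolving each organization id once keeps every later INSERT ... SELECT short and avoids re-running the same lookup for each row.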
DatabaseConnectionTest { -// -// @Autowired private DataSource dataSource; -// -// @Autowired private UserMapper userMapper; // JPA Repository ๋Œ€์‹  MyBatis Mapper๋ฅผ ์ฃผ์ž… -// -// @Test -// @DisplayName("DataSource๋ฅผ ํ†ตํ•ด DB ์ปค๋„ฅ์…˜์„ ์„ฑ๊ณต์ ์œผ๋กœ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค.") -// void canGetDatabaseConnection() { -// try (Connection connection = dataSource.getConnection()) { -// assertThat(connection).isNotNull(); -// assertThat(connection.isValid(1)).isTrue(); -// System.out.println("DB Connection successful: " + connection.getMetaData().getURL()); -// } catch (SQLException e) { -// org.junit.jupiter.api.Assertions.fail("Failed to get database connection", e); -// } -// } -// -// @Test -// @DisplayName("MyBatis Mapper๋ฅผ ํ†ตํ•ด 'ํ™๊ธธ๋™' ์‚ฌ์šฉ์ž๋ฅผ ์ด๋ฉ”์ผ๋กœ ์กฐํšŒ") -// void findUserByEmailWithMyBatis() { -// // given -// String testEmail = "hong.gildong@example.com"; -// -// // when -// Optional foundUser = userMapper.findByEmail(testEmail); -// -// // then -// // ์‚ฌ์šฉ์ž๊ฐ€ ์กด์žฌํ•˜๊ณ , ์ด๋ฆ„์ด 'ํ™๊ธธ๋™'์ธ์ง€ ํ™•์ธ -// assertThat(foundUser).isPresent(); -// assertThat(foundUser.get().getName()).isEqualTo("ํ™๊ธธ๋™"); -// System.out.println("Successfully found user with MyBatis: " + foundUser.get().getName()); -// } -// -// @Test -// @DisplayName("์ƒ˜ํ”Œ ๋ฐ์ดํ„ฐ๊ฐ€ ์˜ฌ๋ฐ”๋ฅด๊ฒŒ ์‚ฝ์ž…๋˜์—ˆ๋Š”์ง€ ํ™•์ธ") -// void verifyAllSampleDataInserted() { -// // ์‚ฌ์šฉ์ž ๋ฐ์ดํ„ฐ ํ™•์ธ -// Optional hong = userMapper.findByEmail("hong.gildong@example.com"); -// assertThat(hong).isPresent(); -// assertThat(hong.get().getName()).isEqualTo("ํ™๊ธธ๋™"); -// -// Optional kim = userMapper.findByEmail("kim.chulsu@example.com"); -// assertThat(kim).isPresent(); -// assertThat(kim.get().getName()).isEqualTo("๊น€์ฒ ์ˆ˜"); -// -// System.out.println("์ƒ˜ํ”Œ ๋ฐ์ดํ„ฐ ์‚ฝ์ž… ์„ฑ๊ณต - ํ™๊ธธ๋™, ๊น€์ฒ ์ˆ˜ ํ™•์ธ"); -// } -// } diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java b/apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java deleted file mode 100644 index 26cfc86b..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.gltkorea.icebang; - -import org.junit.jupiter.api.Test; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.context.annotation.Import; - -@Import(TestcontainersConfiguration.class) -@SpringBootTest -class UserServiceApplicationTests { - - @Test - void contextLoads() {} -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java b/apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java deleted file mode 100644 index c29707ce..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.gltkorea.icebang.controller; - -import org.springframework.boot.test.context.TestComponent; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.RestController; - -@TestComponent -@RestController -public class TestController { - - @GetMapping("/api/health") - public String health() { - return "OK"; - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java deleted file mode 100644 index ddb3afd9..00000000 --- 
a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.gltkorea.icebang.support; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.web.client.TestRestTemplate; -import org.springframework.boot.test.web.server.LocalServerPort; -import org.springframework.context.annotation.Import; - -import com.gltkorea.icebang.annotation.E2eTest; -import com.gltkorea.icebang.config.E2eTestConfiguration; - -@Import(E2eTestConfiguration.class) -@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) -@E2eTest -public abstract class E2eTestSupport { - - @LocalServerPort protected int port; - - @Autowired protected TestRestTemplate restTemplate; - - protected String getBaseUrl() { - return "http://localhost:" + port; - } - - protected String getApiUrl(String path) { - return getBaseUrl() + "/api" + path; - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java deleted file mode 100644 index bad5a2ba..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.gltkorea.icebang.support; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Test; - -class E2eTestSupportTest extends E2eTestSupport { - - @Test - void shouldStartWithRandomPort() { - // ํฌํŠธ๊ฐ€ ์ œ๋Œ€๋กœ ํ• ๋‹น๋˜์—ˆ๋Š”์ง€ ํ™•์ธ - assertThat(port).isGreaterThan(0); - assertThat(getBaseUrl()).startsWith("http://localhost:"); - assertThat(getApiUrl("/test")).contains("/api/test"); - } - - @Test - void shouldHaveRestTemplate() { - // RestTemplate์ด ์ฃผ์ž…๋˜์—ˆ๋Š”์ง€ ํ™•์ธ - assertThat(restTemplate).isNotNull(); - } - - @Test - void shouldConnectToMariaDBContainer() { - // ์‹ค์ œ DB ์—ฐ๊ฒฐ ํ™•์ธ - String response = restTemplate.getForObject(getApiUrl("/health"), String.class); - // health check endpoint๊ฐ€ ์žˆ๋‹ค๋ฉด ์‚ฌ์šฉ, ์—†์œผ๋ฉด ๊ฐ„๋‹จํ•œ ์ปจํŠธ๋กค๋Ÿฌ ๋งŒ๋“ค์–ด์„œ ํ…Œ์ŠคํŠธ - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java deleted file mode 100644 index 232a2c1f..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.gltkorea.icebang.support; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - -import javax.sql.DataSource; - -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; - -class UnitTestSupportTest extends UnitTestSupport { - - @Autowired private DataSource dataSource; - - @Test - void shouldUseH2DatabaseWithMariaDBMode() throws SQLException { - try (Connection connection = dataSource.getConnection()) { - String url = connection.getMetaData().getURL(); - assertThat(url).contains("h2:mem:testdb"); - - // MariaDB ๋ชจ๋“œ ํ™•์ธ - Statement stmt = connection.createStatement(); - ResultSet rs = - stmt.executeQuery( - "SELECT SETTING_VALUE FROM INFORMATION_SCHEMA.SETTINGS WHERE SETTING_NAME = 'MODE'"); - if (rs.next()) { - assertThat(rs.getString(1)).isEqualTo("MariaDB"); - } - } - 
} - - @Test - void shouldLoadApplicationContext() { - // Spring Context ๋กœ๋”ฉ ํ™•์ธ - assertThat(dataSource).isNotNull(); - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/TestUserServiceApplication.java b/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java similarity index 90% rename from apps/user-service/src/test/java/com/gltkorea/icebang/TestUserServiceApplication.java rename to apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java index f53fa0a9..ba8c2403 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/TestUserServiceApplication.java +++ b/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang; +package site.icebang; import org.springframework.boot.SpringApplication; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/TestcontainersConfiguration.java b/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java similarity index 83% rename from apps/user-service/src/test/java/com/gltkorea/icebang/TestcontainersConfiguration.java rename to apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java index bbe8ed02..b9eb7b76 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/TestcontainersConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang; +package site.icebang; import org.springframework.boot.test.context.TestConfiguration; diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/ContextLoadE2eTests.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/ContextLoadE2eTests.java new file mode 100644 index 00000000..29e5857c --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/ContextLoadE2eTests.java @@ -0,0 +1,11 @@ +package site.icebang.e2e.scenario; + +import org.junit.jupiter.api.Test; + +import site.icebang.e2e.setup.support.E2eTestSupport; + +class ContextLoadE2eTests extends E2eTestSupport { + + @Test + void contextLoads() {} +} diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java new file mode 100644 index 00000000..a873d2d5 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -0,0 +1,299 @@ +package site.icebang.e2e.scenario; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.test.context.jdbc.Sql; + +import site.icebang.e2e.setup.support.E2eTestSupport; + +@Sql( + value = "classpath:sql/01-insert-internal-users.sql", + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) +@DisplayName("์‚ฌ์šฉ์ž ๋“ฑ๋ก ํ”Œ๋กœ์šฐ E2E ํ…Œ์ŠคํŠธ") +class UserRegistrationFlowE2eTest extends E2eTestSupport { + + @SuppressWarnings("unchecked") + @Test + @DisplayName("๊ด€๋ฆฌ์ž๊ฐ€ ์ƒˆ ์‚ฌ์šฉ์ž๋ฅผ ๋“ฑ๋กํ•˜๋Š” ์ „์ฒด ํ”Œ๋กœ์šฐ (ERP ์‹œ๋‚˜๋ฆฌ์˜ค)") + void completeUserRegistrationFlow() throws Exception { + logStep(1, "๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ (์ตœ์šฐ์„ )"); + + // 1. 
๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ (ERP์—์„œ ๋ชจ๋“  ์ž‘์—…์˜ ์„ ํ–‰ ์กฐ๊ฑด) + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders loginHeaders = new HttpHeaders(); + loginHeaders.setContentType(MediaType.APPLICATION_JSON); + loginHeaders.set("Origin", "https://admin.icebang.site"); + loginHeaders.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> loginEntity = new HttpEntity<>(loginRequest, loginHeaders); + + ResponseEntity loginResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), loginEntity, Map.class); + + assertThat(loginResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) loginResponse.getBody().get("success")).isTrue(); + + logSuccess("๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ ์„ฑ๊ณต - ์ด์ œ ๋ชจ๋“  ๋ฆฌ์†Œ์Šค ์ ‘๊ทผ ๊ฐ€๋Šฅ"); + + logStep(2, "์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ (์ธ์ฆ๋œ ์ƒํƒœ)"); + + // 2. ์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ (๋กœ๊ทธ์ธ ํ›„ ๊ฐ€๋Šฅ) + ResponseEntity organizationsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); + + assertThat(organizationsResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) organizationsResponse.getBody().get("success")).isTrue(); + assertThat(organizationsResponse.getBody().get("data")).isNotNull(); + + logSuccess("์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ ์„ฑ๊ณต"); + + logStep(3, "๋ถ€์„œ ๋ฐ ๊ฐ์ข… ๋ฐ์ดํ„ฐ ์กฐํšŒ (ํŠน์ • ์กฐ์ง ์˜ต์…˜)"); + + // 3. ํŠน์ • ์กฐ์ง์˜ ๋ถ€์„œ, ์ง๊ธ‰, ์—ญํ•  ๋ฐ์ดํ„ฐ ์กฐํšŒ + ResponseEntity optionsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations/1/options"), Map.class); + + assertThat(optionsResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) optionsResponse.getBody().get("success")).isTrue(); + + Map optionData = (Map) optionsResponse.getBody().get("data"); + assertThat(optionData.get("departments")).isNotNull(); + assertThat(optionData.get("positions")).isNotNull(); + assertThat(optionData.get("roles")).isNotNull(); + + logSuccess("๋ถ€์„œ ๋ฐ ๊ฐ์ข… ๋ฐ์ดํ„ฐ ์กฐํšŒ ์„ฑ๊ณต"); + + // ์กฐํšŒ๋œ ๋ฐ์ดํ„ฐ ๋กœ๊น… (ERP ๊ด€์ ์—์„œ ์ค‘์š”ํ•œ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ) + System.out.println("๐Ÿ“Š ์กฐํšŒ๋œ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ:"); + System.out.println( + " - ๋ถ€์„œ: " + ((java.util.List) optionData.get("departments")).size() + "๊ฐœ"); + System.out.println( + " - ์ง๊ธ‰: " + ((java.util.List) optionData.get("positions")).size() + "๊ฐœ"); + System.out.println(" - ์—ญํ• : " + ((java.util.List) optionData.get("roles")).size() + "๊ฐœ"); + + logStep(4, "์ƒˆ ์‚ฌ์šฉ์ž ๋“ฑ๋ก (๋ชจ๋“  ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ ํ™•์ธ ํ›„)"); + + // 4. 
์ƒˆ ์‚ฌ์šฉ์ž ๋“ฑ๋ก (์กฐํšŒํ•œ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ ๊ธฐ๋ฐ˜์œผ๋กœ) + Map registerRequest = new HashMap<>(); + registerRequest.put("name", "๊น€์ฒ ์ˆ˜"); + registerRequest.put("email", "kim.chulsoo@example.com"); + registerRequest.put("orgId", 1); + registerRequest.put("deptId", 2); // ์กฐํšŒํ•œ ๋ถ€์„œ ์ •๋ณด ๊ธฐ๋ฐ˜ + registerRequest.put("positionId", 5); // ์กฐํšŒํ•œ ์ง๊ธ‰ ์ •๋ณด ๊ธฐ๋ฐ˜ + registerRequest.put("roleIds", Arrays.asList(6, 7, 8)); // ์กฐํšŒํ•œ ์—ญํ•  ์ •๋ณด ๊ธฐ๋ฐ˜ + registerRequest.put("password", null); + + HttpHeaders registerHeaders = new HttpHeaders(); + registerHeaders.setContentType(MediaType.APPLICATION_JSON); + registerHeaders.set("Origin", "https://admin.icebang.site"); + registerHeaders.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> registerEntity = + new HttpEntity<>(registerRequest, registerHeaders); + + ResponseEntity registerResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), registerEntity, Map.class); + + assertThat(registerResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) registerResponse.getBody().get("success")).isTrue(); + + logSuccess("์ƒˆ ์‚ฌ์šฉ์ž ๋“ฑ๋ก ์„ฑ๊ณต"); + logSuccess( + "๋“ฑ๋ก๋œ ์‚ฌ์šฉ์ž: " + registerRequest.get("name") + " (" + registerRequest.get("email") + ")"); + + logCompletion("ERP ์‚ฌ์šฉ์ž ๋“ฑ๋ก ํ”Œ๋กœ์šฐ"); + } + + @Disabled + @DisplayName("๋กœ๊ทธ์ธ ์—†์ด ๋ฆฌ์†Œ์Šค ์ ‘๊ทผ ์‹œ ๋ชจ๋“  ์š”์ฒญ ์ฐจ๋‹จ") + void accessResourcesWithoutLogin_shouldFailForAll() { + logStep(1, "์ธ์ฆ ์—†์ด ์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ ์‹œ๋„"); + + // 1. ๋กœ๊ทธ์ธ ์—†์ด ์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ ์‹œ๋„ + ResponseEntity orgResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); + + assertThat(orgResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("๋ฏธ์ธ์ฆ ์กฐ์ง ์กฐํšŒ ์ฐจ๋‹จ ํ™•์ธ"); + + logStep(2, "์ธ์ฆ ์—†์ด ์กฐ์ง ์˜ต์…˜ ์กฐํšŒ ์‹œ๋„"); + + // 2. ๋กœ๊ทธ์ธ ์—†์ด ์กฐ์ง ์˜ต์…˜ ์กฐํšŒ ์‹œ๋„ + ResponseEntity optResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations/1/options"), Map.class); + + assertThat(optResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("๋ฏธ์ธ์ฆ ์˜ต์…˜ ์กฐํšŒ ์ฐจ๋‹จ ํ™•์ธ"); + + logStep(3, "์ธ์ฆ ์—†์ด ํšŒ์›๊ฐ€์ž… ์‹œ๋„"); + + // 3. 
๋กœ๊ทธ์ธ ์—†์ด ํšŒ์›๊ฐ€์ž… ์‹œ๋„ + Map registerRequest = new HashMap<>(); + registerRequest.put("name", "ํ…Œ์ŠคํŠธ์‚ฌ์šฉ์ž"); + registerRequest.put("email", "test@example.com"); + registerRequest.put("orgId", 1); + registerRequest.put("deptId", 2); + registerRequest.put("positionId", 5); + registerRequest.put("roleIds", Arrays.asList(6)); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(registerRequest, headers); + + ResponseEntity regResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); + + assertThat(regResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("๋ฏธ์ธ์ฆ ํšŒ์›๊ฐ€์ž… ์ฐจ๋‹จ ํ™•์ธ"); + + logCompletion("ERP ๋ณด์•ˆ ๊ฒ€์ฆ"); + } + + @Test + @DisplayName("์ž˜๋ชป๋œ ์ž๊ฒฉ์ฆ๋ช…์œผ๋กœ ๋กœ๊ทธ์ธ ์‹œ๋„ ์‹œ ์‹คํŒจ") + void loginWithInvalidCredentials_shouldFail() { + logStep(1, "์ž˜๋ชป๋œ ๋น„๋ฐ€๋ฒˆํ˜ธ๋กœ ๋กœ๊ทธ์ธ ์‹œ๋„"); + + Map wrongPasswordRequest = new HashMap<>(); + wrongPasswordRequest.put("email", "admin@icebang.site"); + wrongPasswordRequest.put("password", "wrongpassword"); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(wrongPasswordRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class); + + assertThat(response.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("์ž˜๋ชป๋œ ์ž๊ฒฉ์ฆ๋ช… ๋กœ๊ทธ์ธ ์ฐจ๋‹จ ํ™•์ธ"); + + logStep(2, "์กด์žฌํ•˜์ง€ ์•Š๋Š” ์‚ฌ์šฉ์ž๋กœ ๋กœ๊ทธ์ธ ์‹œ๋„"); + + Map nonExistentUserRequest = new HashMap<>(); + nonExistentUserRequest.put("email", "nonexistent@example.com"); + nonExistentUserRequest.put("password", "anypassword"); + + HttpEntity> nonExistentEntity = + new HttpEntity<>(nonExistentUserRequest, headers); + + ResponseEntity nonExistentResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), nonExistentEntity, Map.class); + + assertThat(nonExistentResponse.getStatusCode()) + .isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("์กด์žฌํ•˜์ง€ ์•Š๋Š” ์‚ฌ์šฉ์ž ๋กœ๊ทธ์ธ ์ฐจ๋‹จ ํ™•์ธ"); + } + + @SuppressWarnings("unchecked") + @Disabled + @DisplayName("์ค‘๋ณต ์ด๋ฉ”์ผ๋กœ ์‚ฌ์šฉ์ž ๋“ฑ๋ก ์‹œ๋„ ์‹œ ์‹คํŒจ") + void register_withDuplicateEmail_shouldFail() { + // ์„ ํ–‰ ์กฐ๊ฑด: ๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ + performAdminLogin(); + + // ์ฒซ ๋ฒˆ์งธ ์‚ฌ์šฉ์ž ๋“ฑ๋ก (์‹ค์ œ API ๋ฐ์ดํ„ฐ ๊ธฐ๋ฐ˜) + registerUser("first.user@example.com", "์ฒซ๋ฒˆ์งธ์‚ฌ์šฉ์ž"); + + logStep(1, "์ค‘๋ณต ์ด๋ฉ”์ผ๋กœ ํšŒ์›๊ฐ€์ž… ์‹œ๋„"); + + // ์กฐ์ง ๋ฐ ์˜ต์…˜ ์ •๋ณด ๋‹ค์‹œ ์กฐํšŒ (์‹ค์ œ ๊ฐ’ ์‚ฌ์šฉ) + ResponseEntity organizationsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); + java.util.List> organizations = + (java.util.List>) organizationsResponse.getBody().get("data"); + Integer orgId = (Integer) organizations.getFirst().get("id"); + + ResponseEntity optionsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations/" + orgId + "/options"), Map.class); + Map optionData = (Map) optionsResponse.getBody().get("data"); + + java.util.List> departments = + (java.util.List>) optionData.get("departments"); + java.util.List> positions = + (java.util.List>) optionData.get("positions"); + java.util.List> roles = + (java.util.List>) optionData.get("roles"); + + Integer deptId = (Integer) departments.getFirst().get("id"); + Integer positionId = (Integer) 
positions.getFirst().get("id"); + Integer roleId = (Integer) roles.getFirst().get("id"); + + // ๋™์ผํ•œ ์ด๋ฉ”์ผ๋กœ ๋‹ค์‹œ ๋“ฑ๋ก ์‹œ๋„ + Map duplicateRequest = new HashMap<>(); + duplicateRequest.put("name", "์ค‘๋ณต์‚ฌ์šฉ์ž"); + duplicateRequest.put("email", "first.user@example.com"); // ์ค‘๋ณต ์ด๋ฉ”์ผ + duplicateRequest.put("orgId", orgId); + duplicateRequest.put("deptId", deptId); + duplicateRequest.put("positionId", positionId); + duplicateRequest.put("roleIds", Collections.singletonList(roleId)); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(duplicateRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); + + // ์ค‘๋ณต ์ด๋ฉ”์ผ ์ฒ˜๋ฆฌ ํ™•์ธ + assertThat(response.getStatusCode()) + .isIn(HttpStatus.BAD_REQUEST, HttpStatus.CONFLICT, HttpStatus.UNPROCESSABLE_ENTITY); + + logSuccess("์ค‘๋ณต ์ด๋ฉ”์ผ ๋“ฑ๋ก ์ฐจ๋‹จ ํ™•์ธ"); + } + + /** ๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ์„ ์ˆ˜ํ–‰ํ•˜๋Š” ํ—ฌํผ ๋ฉ”์„œ๋“œ */ + private void performAdminLogin() { + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(loginRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class); + + if (response.getStatusCode() != HttpStatus.OK) { + logError("๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ ์‹คํŒจ: " + response.getStatusCode()); + throw new RuntimeException("Admin login failed"); + } + + logSuccess("๊ด€๋ฆฌ์ž ๋กœ๊ทธ์ธ ์™„๋ฃŒ"); + } + + /** ์‚ฌ์šฉ์ž ๋“ฑ๋ก์„ ์ˆ˜ํ–‰ํ•˜๋Š” ํ—ฌํผ ๋ฉ”์„œ๋“œ */ + private void registerUser(String email, String name) { + Map registerRequest = new HashMap<>(); + registerRequest.put("name", name); + registerRequest.put("email", email); + registerRequest.put("orgId", 1); + registerRequest.put("deptId", 2); + registerRequest.put("positionId", 5); + registerRequest.put("roleIds", Arrays.asList(6, 7, 8)); + registerRequest.put("password", null); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(registerRequest, headers); + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/E2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/annotation/E2eTest.java similarity index 89% rename from apps/user-service/src/test/java/com/gltkorea/icebang/annotation/E2eTest.java rename to apps/user-service/src/test/java/site/icebang/e2e/setup/annotation/E2eTest.java index 43290a4a..e7d3ef09 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/E2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/annotation/E2eTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.annotation; +package site.icebang.e2e.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java similarity index 88% rename from apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java rename to 
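One assumption worth spelling out for UserRegistrationFlowE2eTest above: every step after the login reuses the same restTemplate, so the session issued by /v0/auth/login must be carried across requests, and whether the underlying HTTP client does that automatically is not visible in this diff. A hedged sketch of doing it explicitly, as a hypothetical scenario class (not part of the PR) that uses only types and helpers already shown in these tests:

    package site.icebang.e2e.scenario;

    import static org.assertj.core.api.Assertions.assertThat;

    import java.util.HashMap;
    import java.util.Map;

    import org.junit.jupiter.api.Test;
    import org.springframework.http.*;

    import site.icebang.e2e.setup.support.E2eTestSupport;

    // Hypothetical scenario: shows one explicit way to carry the login session cookie
    // across requests if the underlying client does not do it automatically.
    class SessionCookieSketchE2eTest extends E2eTestSupport {

      @Test
      void carriesSessionCookieAcrossRequests() {
        Map<String, Object> login = new HashMap<>();
        login.put("email", "admin@icebang.site");
        login.put("password", "qwer1234!A");

        HttpHeaders jsonHeaders = new HttpHeaders();
        jsonHeaders.setContentType(MediaType.APPLICATION_JSON);

        ResponseEntity<Map> loginResponse =
            restTemplate.postForEntity(
                getV0ApiUrl("/auth/login"), new HttpEntity<>(login, jsonHeaders), Map.class);

        // Capture the session cookie (e.g. JSESSIONID) issued by the login endpoint.
        String sessionCookie = loginResponse.getHeaders().getFirst(HttpHeaders.SET_COOKIE);
        assertThat(sessionCookie).isNotNull();

        // Replay it on a protected endpoint so the request is treated as authenticated.
        HttpHeaders authed = new HttpHeaders();
        authed.add(HttpHeaders.COOKIE, sessionCookie);
        ResponseEntity<Map> organizations =
            restTemplate.exchange(
                getV0ApiUrl("/organizations"), HttpMethod.GET, new HttpEntity<>(authed), Map.class);
        assertThat(organizations.getStatusCode()).isEqualTo(HttpStatus.OK);
      }
    }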
apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java index 054360b1..4976d0b8 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config; +package site.icebang.e2e.setup.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.testcontainers.service.connection.ServiceConnection; @@ -6,9 +6,14 @@ import org.springframework.test.context.DynamicPropertyRegistry; import org.springframework.test.context.DynamicPropertySource; import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; @TestConfiguration(proxyBeanMethods = false) public class E2eTestConfiguration { + @Bean + public ObjectMapper objectMapper() { + return new ObjectMapper(); + } @Bean @ServiceConnection diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java new file mode 100644 index 00000000..c2d10870 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java @@ -0,0 +1,60 @@ +package site.icebang.e2e.setup.support; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.web.client.TestRestTemplate; +import org.springframework.boot.test.web.server.LocalServerPort; +import org.springframework.context.annotation.Import; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.web.context.WebApplicationContext; +import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; + +import site.icebang.e2e.setup.annotation.E2eTest; +import site.icebang.e2e.setup.config.E2eTestConfiguration; + +@Import(E2eTestConfiguration.class) +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@E2eTest +public abstract class E2eTestSupport { + @Autowired protected TestRestTemplate restTemplate; + + @Autowired protected ObjectMapper objectMapper; + + @LocalServerPort protected int port; + + @Autowired protected WebApplicationContext webApplicationContext; + + protected MockMvc mockMvc; + + protected String getBaseUrl() { + return "http://localhost:" + port; + } + + protected String getApiUrl(String path) { + return getBaseUrl() + path; + } + + protected String getV0ApiUrl(String path) { + return getBaseUrl() + "/v0" + path; + } + + /** ํ…Œ์ŠคํŠธ ์‹œ๋‚˜๋ฆฌ์˜ค ๋‹จ๊ณ„๋ณ„ ๋กœ๊น…์„ ์œ„ํ•œ ์œ ํ‹ธ๋ฆฌํ‹ฐ ๋ฉ”์„œ๋“œ */ + protected void logStep(int stepNumber, String description) { + System.out.println(String.format("๐Ÿ“‹ Step %d: %s", stepNumber, description)); + } + + /** ํ…Œ์ŠคํŠธ ์„ฑ๊ณต ๋กœ๊น…์„ ์œ„ํ•œ ์œ ํ‹ธ๋ฆฌํ‹ฐ ๋ฉ”์„œ๋“œ */ + protected void logSuccess(String message) { + System.out.println("โœ… " + message); + } + + /** ํ…Œ์ŠคํŠธ ์‹คํŒจ ๋กœ๊น…์„ ์œ„ํ•œ ์œ ํ‹ธ๋ฆฌํ‹ฐ ๋ฉ”์„œ๋“œ */ + protected void logError(String message) { + System.out.println("โŒ " + message); + } + + /** ํ…Œ์ŠคํŠธ ์™„๋ฃŒ ๋กœ๊น…์„ ์œ„ํ•œ ์œ ํ‹ธ๋ฆฌํ‹ฐ ๋ฉ”์„œ๋“œ */ + protected void logCompletion(String scenario) { + System.out.println(String.format("๐ŸŽ‰ %s ์‹œ๋‚˜๋ฆฌ์˜ค ์™„๋ฃŒ!", scenario)); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupportTest.java 
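E2eTestSupport above declares a protected MockMvc field and injects the WebApplicationContext, but nothing shown in this diff ever builds that MockMvc instance. If subclasses are expected to use it, one conventional way to initialize it would be a @BeforeEach hook along these lines (a sketch of a possible addition, not code from the PR):

    // Hypothetical addition inside E2eTestSupport: build the MockMvc field the class already declares.
    // Requires: import org.junit.jupiter.api.BeforeEach;
    //           import org.springframework.test.web.servlet.setup.MockMvcBuilders;
    @BeforeEach
    void setUpMockMvc() {
      this.mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

Without something like this, the mockMvc field stays null and only restTemplate is usable from E2E scenarios.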
b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupportTest.java new file mode 100644 index 00000000..f9fe164e --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupportTest.java @@ -0,0 +1,17 @@ +package site.icebang.e2e.setup.support; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; +import org.springframework.test.annotation.DirtiesContext; + +@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) +class E2eTestSupportTest extends E2eTestSupport { + + @Test + void shouldStartWithRandomPort() { + // ํฌํŠธ๊ฐ€ ์ œ๋Œ€๋กœ ํ• ๋‹น๋˜์—ˆ๋Š”์ง€ ํ™•์ธ + assertThat(port).isGreaterThan(0); + assertThat(getBaseUrl()).startsWith("http://localhost:"); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/integration/setup/annotation/IntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/setup/annotation/IntegrationTest.java new file mode 100644 index 00000000..77dfddf9 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/setup/annotation/IntegrationTest.java @@ -0,0 +1,15 @@ +package site.icebang.integration.setup.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.springframework.test.context.ActiveProfiles; + +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Tag("integration") +@ActiveProfiles("test-integration") +public @interface IntegrationTest {} diff --git a/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java b/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java new file mode 100644 index 00000000..f60de9cc --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java @@ -0,0 +1,29 @@ +package site.icebang.integration.setup.config; + +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.restdocs.mockmvc.MockMvcRestDocumentation; +import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler; +import org.springframework.restdocs.operation.preprocess.Preprocessors; + +import com.fasterxml.jackson.databind.ObjectMapper; + +@TestConfiguration +public class RestDocsConfiguration { + + @Bean + public RestDocumentationResultHandler restDocumentationResultHandler() { + return MockMvcRestDocumentation.document( + "{class-name}/{method-name}", + Preprocessors.preprocessRequest( + Preprocessors.removeHeaders("Host", "Content-Length"), Preprocessors.prettyPrint()), + Preprocessors.preprocessResponse( + Preprocessors.removeHeaders("Content-Length", "Date", "Keep-Alive", "Connection"), + Preprocessors.prettyPrint())); + } + + @Bean + public ObjectMapper testObjectMapper() { + return new ObjectMapper(); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/integration/setup/support/IntegrationTestSupport.java b/apps/user-service/src/test/java/site/icebang/integration/setup/support/IntegrationTestSupport.java new file mode 100644 index 00000000..ca28cd37 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/setup/support/IntegrationTestSupport.java @@ -0,0 +1,36 @@ +package site.icebang.integration.setup.support; + +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.restdocs.AutoConfigureRestDocs; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.web.server.LocalServerPort; +import org.springframework.context.annotation.Import; +import org.springframework.test.web.servlet.MockMvc; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import site.icebang.integration.setup.annotation.IntegrationTest; +import site.icebang.integration.setup.config.RestDocsConfiguration; + +@IntegrationTest +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@AutoConfigureMockMvc +@AutoConfigureRestDocs +@Import(RestDocsConfiguration.class) +public abstract class IntegrationTestSupport { + + @Autowired protected MockMvc mockMvc; + + @Autowired protected ObjectMapper objectMapper; + + @LocalServerPort protected int port; + + /** RestDocs์—์„œ ์‹ค์ œ API ํ˜ธ์ถœ ์ฃผ์†Œ๋ฅผ ํ‘œ๊ธฐํ•  ๋•Œ ์‚ฌ์šฉ */ + protected String getApiUrlForDocs(String path) { + if (path.startsWith("/")) { + return "http://localhost:" + port + path; + } + return "http://localhost:" + port + "/" + path; + } +} diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java new file mode 100644 index 00000000..5c538105 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java @@ -0,0 +1,82 @@ +package site.icebang.integration.tests.auth; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; + +import site.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = "classpath:sql/01-insert-internal-users.sql", + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +class AuthApiIntegrationTest extends IntegrationTestSupport { + @Test + @DisplayName("์‚ฌ์šฉ์ž ๋กœ๊ทธ์ธ ์„ฑ๊ณต") + void login_success() throws Exception { + // given + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + // MockMvc๋กœ REST Docs + OpenAPI ์ƒ์„ฑ + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/login")) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/") + .content(objectMapper.writeValueAsString(loginRequest))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + 
.andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isEmpty()) + .andDo( + document( + "auth-login", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("์‚ฌ์šฉ์ž ๋กœ๊ทธ์ธ") + .description("์ด๋ฉ”์ผ๊ณผ ๋น„๋ฐ€๋ฒˆํ˜ธ๋กœ ์‚ฌ์šฉ์ž ์ธ์ฆ์„ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค") + .requestFields( + fieldWithPath("email") + .type(JsonFieldType.STRING) + .description("์‚ฌ์šฉ์ž ์ด๋ฉ”์ผ ์ฃผ์†Œ"), + fieldWithPath("password") + .type(JsonFieldType.STRING) + .description("์‚ฌ์šฉ์ž ๋น„๋ฐ€๋ฒˆํ˜ธ")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("์š”์ฒญ ์„ฑ๊ณต ์—ฌ๋ถ€"), + fieldWithPath("data") + .type(JsonFieldType.NULL) + .description("์‘๋‹ต ๋ฐ์ดํ„ฐ (๋กœ๊ทธ์ธ ์„ฑ๊ณต ์‹œ null)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("์‘๋‹ต ๋ฉ”์‹œ์ง€"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP ์ƒํƒœ")) + .build()))); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java new file mode 100644 index 00000000..666a8ea5 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java @@ -0,0 +1,171 @@ +package site.icebang.integration.tests.organization; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; +import com.fasterxml.jackson.databind.JsonNode; + +import site.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = { + "classpath:sql/01-insert-internal-users.sql", + "classpath:sql/02-insert-external-users.sql" + }, + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +class OrganizationApiIntegrationTest extends IntegrationTestSupport { + + @Test + @DisplayName("์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ ์„ฑ๊ณต") + void getOrganizations_success() throws Exception { + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/organizations")) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isArray()) + .andExpect(jsonPath("$.data[0].id").exists()) + .andExpect(jsonPath("$.data[0].organizationName").exists()) + .andDo( + 
document( + "organizations-list", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Organization") + .summary("์กฐ์ง ๋ชฉ๋ก ์กฐํšŒ") + .description("์‹œ์Šคํ…œ์— ๋“ฑ๋ก๋œ ๋ชจ๋“  ์กฐ์ง์˜ ๋ชฉ๋ก์„ ์กฐํšŒํ•ฉ๋‹ˆ๋‹ค") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("์š”์ฒญ ์„ฑ๊ณต ์—ฌ๋ถ€"), + fieldWithPath("data[]").type(JsonFieldType.ARRAY).description("์กฐ์ง ๋ชฉ๋ก"), + fieldWithPath("data[].id") + .type(JsonFieldType.NUMBER) + .description("์กฐ์ง ID"), + fieldWithPath("data[].organizationName") + .type(JsonFieldType.STRING) + .description("์กฐ์ง๋ช…"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("์‘๋‹ต ๋ฉ”์‹œ์ง€"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP ์ƒํƒœ")) + .build()))); + } + + @Test + @DisplayName("์กฐ์ง ์˜ต์…˜ ์ •๋ณด ์กฐํšŒ ์„ฑ๊ณต") + void getOrganizationOptions_success() throws Exception { + // given - ๋จผ์ € ์กฐ์ง ๋ชฉ๋ก์„ ์กฐํšŒํ•ด์„œ ์‹ค์ œ ์กด์žฌํ•˜๋Š” ID๋ฅผ ๊ฐ€์ ธ์˜ด + MvcResult organizationsResult = + mockMvc + .perform(get("/v0/organizations").contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andReturn(); + + String responseBody = organizationsResult.getResponse().getContentAsString(); + JsonNode jsonNode = objectMapper.readTree(responseBody); + JsonNode organizations = jsonNode.get("data"); + + // ์ฒซ ๋ฒˆ์งธ ์กฐ์ง์˜ ID๋ฅผ ๊ฐ€์ ธ์˜ด + Long organizationId = organizations.get(0).get("id").asLong(); + + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/organizations/{organizationId}/options"), organizationId) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data.departments").isArray()) + .andExpect(jsonPath("$.data.positions").isArray()) + .andExpect(jsonPath("$.data.roles").isArray()) + .andExpect(jsonPath("$.data.departments[0].id").exists()) + .andExpect(jsonPath("$.data.departments[0].name").exists()) + .andExpect(jsonPath("$.data.positions[0].id").exists()) + .andExpect(jsonPath("$.data.positions[0].title").exists()) + .andExpect(jsonPath("$.data.roles[0].id").exists()) + .andExpect(jsonPath("$.data.roles[0].name").exists()) + .andExpect(jsonPath("$.data.roles[0].description").exists()) + .andDo( + document( + "organizations-options", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Organization") + .summary("์กฐ์ง ์˜ต์…˜ ์ •๋ณด ์กฐํšŒ") + .description("ํŠน์ • ์กฐ์ง์˜ ๋ถ€์„œ, ์ง๊ธ‰, ์—ญํ•  ์˜ต์…˜ ์ •๋ณด๋ฅผ ์กฐํšŒํ•ฉ๋‹ˆ๋‹ค") + .pathParameters(parameterWithName("organizationId").description("์กฐ์ง ID")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("์š”์ฒญ ์„ฑ๊ณต ์—ฌ๋ถ€"), + fieldWithPath("data").type(JsonFieldType.OBJECT).description("์˜ต์…˜ ๋ฐ์ดํ„ฐ"), + fieldWithPath("data.departments[]") + .type(JsonFieldType.ARRAY) + .description("๋ถ€์„œ ๋ชฉ๋ก"), + fieldWithPath("data.departments[].id") + .type(JsonFieldType.NUMBER) + .description("๋ถ€์„œ ID"), + fieldWithPath("data.departments[].name") + .type(JsonFieldType.STRING) + .description("๋ถ€์„œ๋ช…"), + fieldWithPath("data.positions[]") + .type(JsonFieldType.ARRAY) 
+                            .description("์ง๊ธ‰ ๋ชฉ๋ก"),
+                        fieldWithPath("data.positions[].id")
+                            .type(JsonFieldType.NUMBER)
+                            .description("์ง๊ธ‰ ID"),
+                        fieldWithPath("data.positions[].title")
+                            .type(JsonFieldType.STRING)
+                            .description("์ง๊ธ‰๋ช…"),
+                        fieldWithPath("data.roles[]")
+                            .type(JsonFieldType.ARRAY)
+                            .description("์—ญํ•  ๋ชฉ๋ก"),
+                        fieldWithPath("data.roles[].id")
+                            .type(JsonFieldType.NUMBER)
+                            .description("์—ญํ•  ID"),
+                        fieldWithPath("data.roles[].name")
+                            .type(JsonFieldType.STRING)
+                            .description("์—ญํ• ๋ช…"),
+                        fieldWithPath("data.roles[].description")
+                            .type(JsonFieldType.STRING)
+                            .description("์—ญํ•  ์„ค๋ช…"),
+                        fieldWithPath("message")
+                            .type(JsonFieldType.STRING)
+                            .description("์‘๋‹ต ๋ฉ”์‹œ์ง€"),
+                        fieldWithPath("status")
+                            .type(JsonFieldType.STRING)
+                            .description("HTTP ์ƒํƒœ"))
+                    .build())));
+  }
+}
diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/UnitTest.java b/apps/user-service/src/test/java/site/icebang/unit/setup/annotation/UnitTest.java
similarity index 89%
rename from apps/user-service/src/test/java/com/gltkorea/icebang/annotation/UnitTest.java
rename to apps/user-service/src/test/java/site/icebang/unit/setup/annotation/UnitTest.java
index 1927475a..65afc91b 100644
--- a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/UnitTest.java
+++ b/apps/user-service/src/test/java/site/icebang/unit/setup/annotation/UnitTest.java
@@ -1,4 +1,4 @@
-package com.gltkorea.icebang.annotation;
+package site.icebang.unit.setup.annotation;
 
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupport.java b/apps/user-service/src/test/java/site/icebang/unit/setup/support/UnitTestSupport.java
similarity index 50%
rename from apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupport.java
rename to apps/user-service/src/test/java/site/icebang/unit/setup/support/UnitTestSupport.java
index 88c4315e..3977703c 100644
--- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupport.java
+++ b/apps/user-service/src/test/java/site/icebang/unit/setup/support/UnitTestSupport.java
@@ -1,10 +1,7 @@
-package com.gltkorea.icebang.support;
+package site.icebang.unit.setup.support;
 
-import org.springframework.boot.test.context.SpringBootTest;
+import site.icebang.unit.setup.annotation.UnitTest;
 
-import com.gltkorea.icebang.annotation.UnitTest;
-
-@SpringBootTest
 @UnitTest
 public abstract class UnitTestSupport {
diff --git a/docker/production/docker-compose.yml b/docker/production/docker-compose.yml
index fdfdaadf..fa3ca0cc 100644
--- a/docker/production/docker-compose.yml
+++ b/docker/production/docker-compose.yml
@@ -9,7 +9,20 @@ services:
       - "80:8080"
     networks:
       - app-network
+    env_file:
+      - .env.prod
+
+  pre-processing-service:
+    image: ghcr.io/kernel180-be12/final-4team-icebang/pre-processing-service:latest
+    container_name: pre-processing-service
+    restart: always
+    ports:
+      - "8000:8000"
+    networks:
+      - app-network
+    env_file:
+      - .env.prod
 
 networks:
   app-network:
     driver: bridge
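Finally, the docker-compose change above adds the pre-processing-service container and points both services at an env_file named .env.prod, which is not committed in this diff. A rough sketch of bringing the stack up on the host, with placeholder keys only (the real variable names are whatever the two services actually read):

    # Run from docker/production/ on the deployment host; .env.prod must sit next to docker-compose.yml.
    # The keys below are placeholders, not the project's actual configuration.
    printf '%s\n' \
      'EXAMPLE_DB_URL=...' \
      'EXAMPLE_DB_PASSWORD=...' > .env.prod

    docker compose pull   # fetch the images referenced in the compose file
    docker compose up -d  # start user-service (port 80) and pre-processing-service (port 8000)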