diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml
index ad50c51b..54d1ab1a 100644
--- a/.github/workflows/ci-python.yml
+++ b/.github/workflows/ci-python.yml
@@ -2,6 +2,8 @@ name: CI (Python/FastAPI)
on:
push:
+ branches:
+ - feature/onnx
tags:
- 'pre-processing-v*'
pull_request:
diff --git a/.github/workflows/deploy-java.yml b/.github/workflows/deploy-java.yml
index 9c876f2f..d7526506 100644
--- a/.github/workflows/deploy-java.yml
+++ b/.github/workflows/deploy-java.yml
@@ -28,6 +28,9 @@ jobs:
echo "DB_USER=${{ secrets.DB_USER }}" >> .env.prod
echo "DB_PASS=${{ secrets.DB_PASS }}" >> .env.prod
echo "DB_NAME=${{ secrets.DB_NAME }}" >> .env.prod
+ echo "ENV_NAME=${{ secrets.LOKI_URL }}" >> .env.prod
+ echo "ENV_NAME=${{ secrets.LOKI_USERNAME }}" >> .env.prod
+ echo "ENV_NAME=${{ secrets.LOKI_PASSWORD }}" >> .env.prod
echo "ENV_NAME=${{ secrets.ENV_NAME }}" >> .env.prod
- name: Set repo lowercase
@@ -52,6 +55,27 @@ jobs:
target: "~/app/docker/production/"
overwrite: true
+ - name: Copy Caddyfile to EC2
+ uses: appleboy/scp-action@v0.1.7
+ with:
+ host: ${{ secrets.SERVER_HOST }}
+ username: ubuntu
+ key: ${{ secrets.SERVER_SSH_KEY }}
+ source: "docker/production/Caddyfile"
+ target: "~/app/docker/production/"
+ overwrite: true
+
+ - name: Copy promtail-config to EC2
+ uses: appleboy/scp-action@v0.1.7
+ with:
+ host: ${{ secrets.SERVER_HOST }}
+ username: ubuntu
+ key: ${{ secrets.SERVER_SSH_KEY }}
+ source: "docker/production/promtail-config.yml"
+ target: "~/app/docker/production/"
+ overwrite: true
+
- name: Deploy on EC2
uses: appleboy/ssh-action@v1.0.3
with:
diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py
index 04ae0b14..d0d078e8 100644
--- a/apps/pre-processing-service/app/api/endpoints/blog.py
+++ b/apps/pre-processing-service/app/api/endpoints/blog.py
@@ -4,16 +4,16 @@
from ...model.schemas import *
from app.service.blog.tistory_blog_post_service import TistoryBlogPostService
from app.service.blog.naver_blog_post_service import NaverBlogPostService
-from ...service.blog.blogger_blog_post_service import BloggerBlogPostService
+from ...service.blog.blogger_blog_post_adapter import (
+ BloggerBlogPostAdapter,
+)
+from app.utils.response import Response
+from app.service.blog.blog_create_service import BlogContentService
+from app.service.blog.blog_publish_service import BlogPublishService
router = APIRouter()
-@router.get("/", summary="블로그 API 상태 확인")
-async def root():
- return {"message": "blog API"}
-
-
@router.post(
"/rag/create",
response_model=ResponseBlogCreate,
@@ -23,7 +23,10 @@ async def rag_create(request: RequestBlogCreate):
"""
RAG 기반 블로그 콘텐츠 생성
"""
- return {"message": "blog API"}
+ blog_service = BlogContentService()
+ response_data = blog_service.generate_blog_content(request)
+
+ return Response.ok(response_data)
@router.post(
@@ -37,52 +40,7 @@ async def publish(request: RequestBlogPublish):
네이버 블로그와 티스토리 블로그를 지원하며,
현재는 생성된 콘텐츠가 아닌 임의의 제목, 내용, 태그를 배포합니다.
"""
- if request.tag == "naver":
- naver_service = NaverBlogPostService()
- result = naver_service.post_content(
- title=request.post_title,
- content=request.post_content,
- tags=request.post_tags,
- )
-
- if not result:
- raise CustomException(
- "네이버 블로그 포스팅에 실패했습니다.", status_code=500
- )
- return ResponseBlogPublish(
- job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result
- )
-
- elif request.tag == "tistory":
- tistory_service = TistoryBlogPostService()
- result = tistory_service.post_content(
- title=request.post_title,
- content=request.post_content,
- tags=request.post_tags,
- )
-
- if not result:
- raise CustomException(
- "티스토리 블로그 포스팅에 실패했습니다.", status_code=500
- )
-
- return ResponseBlogPublish(
- job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result
- )
-
- elif request.tag == "blogger":
- blogger_service = BloggerBlogPostService()
- result = blogger_service.post_content(
- title=request.post_title,
- content=request.post_content,
- tags=request.post_tags,
- )
-
- if not result:
- raise CustomException(
- "블로거 블로그 포스팅에 실패했습니다.", status_code=500
- )
+ publish_service = BlogPublishService()
+ response_data = publish_service.publish_content(request)
- return ResponseBlogPublish(
- job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result
- )
+ return Response.ok(response_data)
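Both endpoints now funnel through `Response.ok`, whose source is not part of this diff. A minimal sketch of what that helper plausibly looks like, assuming it builds the `success`/`data`/`status`/`message` envelope that `ResponseBase` declares in `app/model/schemas.py`:

from typing import Any, Dict


class Response:
    """Hypothetical envelope helper mirroring ResponseBase(success, data, status, message)."""

    @staticmethod
    def ok(data: Any, message: str = "OK") -> Dict[str, Any]:
        # Wraps service-layer payloads so response_model=ResponseBlogCreate validates.
        return {"success": True, "data": data, "status": "200", "message": message}

    @staticmethod
    def fail(message: str, status: str = "500") -> Dict[str, Any]:
        return {"success": False, "data": None, "status": status, "message": message}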
diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py
index 2b407d6d..6c1627bd 100644
--- a/apps/pre-processing-service/app/api/endpoints/keywords.py
+++ b/apps/pre-processing-service/app/api/endpoints/keywords.py
@@ -6,14 +6,6 @@
router = APIRouter()
-@router.get("/", summary="키워드 API 상태 확인")
-async def root():
- """
- 키워드 API가 정상 동작하는지 확인
- """
- return {"message": "keyword API"}
-
-
@router.post(
"/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색"
)
@@ -23,9 +15,6 @@ async def search(request: RequestNaverSearch):
요청 예시:
{
- "job_id": 1,
- "schedule_id": 1,
- "schedule_his_id": 1,
"tag": "naver",
"category": "50000000",
"start_date": "2025-09-01",
@@ -34,15 +23,3 @@ async def search(request: RequestNaverSearch):
"""
response_data = await keyword_search(request)
return response_data
-
-
-@router.post(
- "/ssadagu/validate",
- response_model=ResponseNaverSearch,
- summary="사다구몰 키워드 검증",
-)
-async def ssadagu_validate(request: RequestNaverSearch):
- """
- 사다구몰 키워드 검증 테스트용 엔드포인트
- """
- return ResponseNaverSearch()
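A usage sketch of the trimmed request shape (the `job_id`/`schedule_id`/`schedule_his_id` fields are gone from `RequestBase`), assuming the `app` object exported by `app.main` and the `/keywords/search` mount path implied by the middleware mappings later in this diff; the payload mirrors the docstring example above:

from fastapi.testclient import TestClient

from app.main import app  # assumed app export

client = TestClient(app)

# Meta IDs are no longer accepted; only the search parameters remain.
payload = {
    "tag": "naver",
    "category": "50000000",
    "start_date": "2025-09-01",
    "end_date": "2025-09-02",
}
resp = client.post("/keywords/search", json=payload)
print(resp.status_code, resp.json())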
diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py
index ceb55c9d..32a4dcbe 100644
--- a/apps/pre-processing-service/app/api/endpoints/product.py
+++ b/apps/pre-processing-service/app/api/endpoints/product.py
@@ -8,6 +8,7 @@
from ...service.crawl_service import CrawlService
from ...service.search_service import SearchService
from ...service.match_service import MatchService
+from ...service.similarity_service import SimilarityService
-# from ...service.similarity_service import SimilarityService
@@ -16,14 +17,6 @@
router = APIRouter()
-@router.get("/", summary="상품 API 상태 확인")
-async def root():
- """
- 상품 API 서버 상태 확인용 엔드포인트
- """
- return {"message": "product API"}
-
-
@router.post("/search", response_model=ResponseSadaguSearch, summary="상품 검색")
async def search(request: RequestSadaguSearch):
"""
@@ -31,12 +24,12 @@ async def search(request: RequestSadaguSearch):
"""
try:
search_service = SearchService()
- result = await search_service.search_products(request)
+ response_data = await search_service.search_products(request)
- if not result:
+ if not response_data:
raise CustomException(500, "상품 검색에 실패했습니다.", "SEARCH_FAILED")
- return result
+ return response_data
except InvalidItemDataException as e:
raise HTTPException(status_code=e.status_code, detail=e.detail)
except Exception as e:
@@ -50,56 +43,56 @@ async def match(request: RequestSadaguMatch):
"""
try:
match_service = MatchService()
- result = match_service.match_products(request)
+ response_data = match_service.match_products(request)
- if not result:
+ if not response_data:
raise CustomException(500, "상품 매칭에 실패했습니다.", "MATCH_FAILED")
- return result
+ return response_data
except InvalidItemDataException as e:
raise HTTPException(status_code=e.status_code, detail=e.detail)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
-# @router.post(
-# "/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석"
-# )
-# async def similarity(request: RequestSadaguSimilarity):
-# """
-# 매칭된 상품들 중 키워드와의 유사도를 계산하여 최적의 상품을 선택합니다.
-# """
-# try:
-# similarity_service = SimilarityService()
-# result = similarity_service.select_product_by_similarity(request)
-#
-# if not result:
-# raise CustomException(
-# 500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED"
-# )
-#
-# return result
-# except InvalidItemDataException as e:
-# raise HTTPException(status_code=e.status_code, detail=e.detail)
-# except Exception as e:
-# raise HTTPException(status_code=500, detail=str(e))
+@router.post(
+ "/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석"
+)
+async def similarity(request: RequestSadaguSimilarity):
+ """
+ 매칭된 상품들 중 키워드와의 유사도를 계산하여 최적의 상품을 선택합니다.
+ """
+ try:
+ similarity_service = SimilarityService()
+ response_data = similarity_service.select_product_by_similarity(request)
+
+ if not response_data:
+ raise CustomException(
+ 500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED"
+ )
+
+ return response_data
+ except InvalidItemDataException as e:
+ raise HTTPException(status_code=e.status_code, detail=e.detail)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
@router.post(
"/crawl", response_model=ResponseSadaguCrawl, summary="상품 상세 정보 크롤링"
)
-async def crawl(request: Request, body: RequestSadaguCrawl):
+async def crawl(body: RequestSadaguCrawl):
"""
상품 상세 페이지를 크롤링하여 상세 정보를 수집합니다.
"""
try:
crawl_service = CrawlService()
- result = await crawl_service.crawl_product_detail(body)
+ response_data = await crawl_service.crawl_product_detail(body)
- if not result:
+ if not response_data:
raise CustomException(500, "상품 크롤링에 실패했습니다.", "CRAWL_FAILED")
- return result
+ return response_data
except InvalidItemDataException as e:
raise HTTPException(status_code=e.status_code, detail=e.detail)
except ItemNotFoundException as e:
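With the similarity route restored, a hedged sketch of driving the service the way the endpoint body does; the `search_results`/`matched_products` field names are assumed from the request model and the `/tests/tester` flow below, and all values are illustrative:

from app.model.schemas import RequestSadaguSimilarity
from app.service.similarity_service import SimilarityService

req = RequestSadaguSimilarity(
    keyword="아이폰 케이스",
    search_results=[{"title": "아이폰 15 케이스", "price": 9900}],
    matched_products=[{"title": "아이폰 15 케이스", "score": 0.92}],
)

service = SimilarityService()
result = service.select_product_by_similarity(req)  # synchronous call, as in the endpoint
print(result)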
diff --git a/apps/pre-processing-service/app/api/endpoints/sample.py b/apps/pre-processing-service/app/api/endpoints/sample.py
new file mode 100644
index 00000000..f6d586fb
--- /dev/null
+++ b/apps/pre-processing-service/app/api/endpoints/sample.py
@@ -0,0 +1,45 @@
+from fastapi import APIRouter
+from ...model.schemas import *
+from app.utils.response import Response
+
+router = APIRouter()
+
+
+@router.get("/")
+async def root():
+ return {"message": "sample API"}
+
+
+@router.post("/keywords/search", summary="네이버 키워드 검색")
+async def search(request: RequestNaverSearch):
+ return Response.ok({"test": "hello world"})
+
+
+@router.post("/blogs/rag/create", summary="RAG 기반 블로그 콘텐츠 생성")
+async def rag_create(request: RequestBlogCreate):
+ return Response.ok({"test": "hello world"})
+
+
+@router.post("/blogs/publish", summary="블로그 콘텐츠 배포")
+async def publish(request: RequestBlogPublish):
+ return Response.ok({"test": "hello world"})
+
+
+@router.post("/products/search", summary="상품 검색")
+async def product_search(request: RequestSadaguSearch):
+ return Response.ok({"test": "hello world"})
+
+
+@router.post("/products/match", summary="상품 매칭")
+async def product_match(request: RequestSadaguMatch):
+ return Response.ok({"test": "hello world"})
+
+
+@router.post("/products/similarity", summary="상품 유사도 분석")
+async def product_similarity(request: RequestSadaguSimilarity):
+ return Response.ok({"test": "hello world"})
+
+
+@router.post("/products/crawl", summary="상품 상세 정보 크롤링")
+async def product_crawl(request: RequestSadaguCrawl):
+ return Response.ok({"test": "hello world"})
diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py
index e26bd203..91977a3f 100644
--- a/apps/pre-processing-service/app/api/endpoints/test.py
+++ b/apps/pre-processing-service/app/api/endpoints/test.py
@@ -21,11 +21,6 @@
router = APIRouter()
-@router.get("/")
-async def root():
- return {"message": "테스트 API"}
-
-
@router.get("/hello/{name}", tags=["hello"])
# @log_api_call
async def say_hello(name: str):
@@ -67,11 +62,6 @@ def with_meta(data: Mapping[str, Any], meta: Mapping[str, Any]) -> Dict[str, Any
@router.get("/tester", response_model=None)
async def processing_tester():
- meta = {
- "job_id": 1,
- "schedule_id": 1,
- "schedule_his_id": 1, # ✅ 타이포 수정
- }
request_dict = {
"tag": "naver",
"category": "50000000",
@@ -79,7 +69,7 @@ async def processing_tester():
"end_date": "2025-09-02",
}
# 네이버 키워드 검색
- naver_request = RequestNaverSearch(**with_meta(meta, request_dict))
+ naver_request = RequestNaverSearch(**request_dict)  # with_meta(data, meta) needs two args; meta is gone
response_data = await keyword_search(naver_request)
keyword = response_data.get("keyword")
loguru.logger.info(keyword)
@@ -89,21 +79,21 @@ async def processing_tester():
}
# 싸다구 상품 검색
- sadagu_request = RequestSadaguSearch(**with_meta(meta, keyword))
+ sadagu_request = RequestSadaguSearch(**keyword)
search_service = SearchService()
keyword_result = await search_service.search_products(sadagu_request)
loguru.logger.info(keyword_result)
# 싸다구 상품 매치
keyword["search_results"] = keyword_result.get("search_results")
- keyword_match_request = RequestSadaguMatch(**with_meta(meta, keyword))
+ keyword_match_request = RequestSadaguMatch(**keyword)
match_service = MatchService()
keyword_match_response = match_service.match_products(keyword_match_request)
loguru.logger.info(keyword_match_response)
# 싸다구 상품 유사도 분석
keyword["matched_products"] = keyword_match_response.get("matched_products")
- keyword_similarity_request = RequestSadaguSimilarity(**with_meta(meta, keyword))
+ keyword_similarity_request = RequestSadaguSimilarity(**keyword)
# similarity_service = SimilarityService()
# keyword_similarity_response = similarity_service.select_product_by_similarity(
# keyword_similarity_request
diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py
index 99286cf6..c1a2fcb4 100644
--- a/apps/pre-processing-service/app/api/router.py
+++ b/apps/pre-processing-service/app/api/router.py
@@ -1,6 +1,6 @@
# app/api/router.py
from fastapi import APIRouter
-from .endpoints import keywords, blog, product, test
+from .endpoints import keywords, blog, product, test, sample
from ..core.config import settings
api_router = APIRouter()
@@ -17,6 +17,8 @@
# 모듈 테스터를 위한 endpoint -> 추후 삭제 예정
api_router.include_router(test.router, prefix="/tests", tags=["Test"])
+api_router.include_router(sample.router, prefix="/v0", tags=["Sample"])
+
@api_router.get("/ping")
async def root():
diff --git a/apps/pre-processing-service/app/core/config.py b/apps/pre-processing-service/app/core/config.py
index ed54cc69..2de3833a 100644
--- a/apps/pre-processing-service/app/core/config.py
+++ b/apps/pre-processing-service/app/core/config.py
@@ -76,10 +76,18 @@ class BaseSettingsConfig(BaseSettings):
db_pass: str
db_name: str
env_name: str
+ app_name: str
# MeCab 사전 경로 (자동 감지)
mecab_path: Optional[str] = None
+ # Loki 설정
+ loki_host: str = "localhost"
+ loki_port: int = 3100
+
+ # Optional extras (OpenAI key consumed by BlogContentService)
+ openai_api_key: Optional[str] = None
+
def __init__(self, **kwargs):
super().__init__(**kwargs)
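The new fields imply extra keys in the service's env files. A hedged sketch of how they surface; the variable names are assumptions based on pydantic `BaseSettings`' case-insensitive field mapping, and the rest of the required DB settings are assumed to already be in the environment:

import os

# Assumed env keys matching the new BaseSettingsConfig fields.
os.environ.setdefault("APP_NAME", "pre-processing-service")
os.environ.setdefault("LOKI_HOST", "http://localhost")
os.environ.setdefault("LOKI_PORT", "3100")
os.environ.setdefault("OPENAI_API_KEY", "sk-test")  # optional, used by BlogContentService

from app.core.config import settings  # noqa: E402  (import after env is prepared)

# LokiLogger later derives its push URL from these two values.
print(f"{settings.loki_host}:{settings.loki_port}/loki/api/v1/push")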
diff --git a/apps/pre-processing-service/app/core/logging_config.py b/apps/pre-processing-service/app/core/logging_config.py
new file mode 100644
index 00000000..b2c10d88
--- /dev/null
+++ b/apps/pre-processing-service/app/core/logging_config.py
@@ -0,0 +1,95 @@
+import os
+from loguru import logger
+import sys
+from contextvars import ContextVar
+
+# trace_id context 변수 import
+try:
+ from app.middleware.ServiceLoggerMiddleware import trace_id_context
+except ImportError:
+ # 모듈이 아직 로드되지 않은 경우를 위한 기본값
+ trace_id_context: ContextVar[str] = ContextVar("trace_id", default="")
+
+
+def setup_file_logging():
+ """
+ PromTail을 통해 Loki로 전송하기 위한 파일 로깅 설정
+ """
+ # 기존 loguru 핸들러 제거 (기본 콘솔 출력 제거)
+ logger.remove()
+
+ # Log directory; a LOG_DIR env var is assumed as an override, defaulting to the previous hardcoded path
+ log_dir = os.getenv("LOG_DIR", "../../docker/local/logs/develop")
+
+ # Create the log directory if it does not exist (loguru also creates it lazily on first write)
+ os.makedirs(log_dir, exist_ok=True)
+
+ # 로그 파일 경로 설정
+ log_file_path = os.path.join(log_dir, "pre-processing-app.log")
+ error_log_file_path = os.path.join(log_dir, "pre-processing-app-error.log")
+
+ # trace_id를 포함한 간단한 포맷 문자열 사용
+ def add_trace_id_filter(record):
+ try:
+ current_trace_id = trace_id_context.get()
+ if current_trace_id:
+ record["extra"]["trace_id"] = current_trace_id
+ else:
+ record["extra"]["trace_id"] = ""
+ except LookupError:
+ record["extra"]["trace_id"] = ""
+ return record
+
+ # 파일 로깅에서 LoggingMiddleware 제외하는 필터
+ def exclude_logging_middleware_filter(record):
+ # LoggingMiddleware의 로그는 파일에 기록하지 않음
+ if record["name"] == "app.middleware.logging":
+ return False
+ return add_trace_id_filter(record)
+
+ # 파일 로깅 핸들러 추가 - trace_id 포함, LoggingMiddleware 제외
+ logger.add(
+ log_file_path,
+ format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {name}:{function}:{line} | {message}",
+ level="DEBUG",
+ rotation="100 MB", # 100MB마다 로테이션
+ retention="7 days", # 7일간 보관
+ compression="zip", # 압축
+ enqueue=True, # 멀티프로세스 안전
+ serialize=False, # JSON 직렬화 비활성화 (PromTail에서 파싱)
+ backtrace=True, # 백트레이스 포함
+ diagnose=True, # 진단 정보 포함
+ filter=exclude_logging_middleware_filter,
+ )
+
+ # 에러 레벨 이상은 별도 파일에도 기록 - trace_id 포함, LoggingMiddleware 제외
+ logger.add(
+ error_log_file_path,
+ format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {name}:{function}:{line} | {message}",
+ level="ERROR",
+ rotation="50 MB",
+ retention="30 days",
+ compression="zip",
+ enqueue=True,
+ serialize=False,
+ backtrace=True,
+ diagnose=True,
+ filter=exclude_logging_middleware_filter,
+ )
+
+ # 개발 환경에서는 콘솔 출력도 유지
+ if os.getenv("ENVIRONMENT", "development") == "development":
+ logger.add(
+ sys.stdout,
+ format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{function}:{line} | {message}",
+ level="DEBUG",
+ colorize=False, # colorize 비활성화하여 태그 충돌 방지
+ filter=add_trace_id_filter,
+ )
+
+ logger.info("File logging setup completed for PromTail integration")
+ return logger
+
+
+def get_logger():
+ """구성된 로거 인스턴스 반환"""
+ return logger
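A minimal sketch of the resulting log line, assuming `ServiceLoggerMiddleware` has already stored the request's `X-Request-ID` in `trace_id_context`:

from app.core.logging_config import setup_file_logging, trace_id_context

logger = setup_file_logging()

# Simulate what ServiceLoggerMiddleware does at the top of dispatch().
trace_id_context.set("req-1234")
logger.info("similarity step finished")

# Expected line in pre-processing-app.log, per the format string above:
# [req-1234] 2025-09-02 12:00:00.000 | INFO | __main__:<module>:8 | similarity step finished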
diff --git a/apps/pre-processing-service/app/main.py b/apps/pre-processing-service/app/main.py
index 9865d845..4bbf3ff1 100644
--- a/apps/pre-processing-service/app/main.py
+++ b/apps/pre-processing-service/app/main.py
@@ -5,6 +5,11 @@
from fastapi.exceptions import RequestValidationError
from app.middleware.ServiceLoggerMiddleware import ServiceLoggerMiddleware
+# 파일 로깅 설정 초기화
+from app.core.logging_config import setup_file_logging
+
+setup_file_logging()
+
# --- 애플리케이션 구성 요소 임포트 ---
from app.api.router import api_router
from app.middleware.logging import LoggingMiddleware
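For context, a sketch of how this wires together in the rest of `main.py`; the app setup itself is outside this hunk, so the exact registration below is an assumption. Direct Loki pushes stay disabled because Promtail tails the files written by `setup_file_logging()`:

from fastapi import FastAPI

from app.api.router import api_router
from app.middleware.ServiceLoggerMiddleware import ServiceLoggerMiddleware
from app.middleware.logging import LoggingMiddleware

app = FastAPI()

# RDS writes stay on; Loki is fed indirectly via Promtail, not pushed from here.
app.add_middleware(ServiceLoggerMiddleware, enable_rds=True, enable_loki=False)
app.add_middleware(LoggingMiddleware)
app.include_router(api_router)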
diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py
index acb120fa..30d3475b 100644
--- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py
+++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py
@@ -8,8 +8,12 @@
import json
import time
+import asyncio
-trace_id_context: ContextVar[str] = ContextVar("trace_id", default="NO_TRACE_ID")
+from app.middleware.rds_logger import RDSLogger
+from app.middleware.loki_logger import LokiLogger
+
+trace_id_context: ContextVar[str] = ContextVar("trace_id", default="")
class ServiceLoggerMiddleware(BaseHTTPMiddleware):
@@ -18,9 +22,17 @@ class ServiceLoggerMiddleware(BaseHTTPMiddleware):
URL 패턴을 기반으로 자동으로 서비스 타입 식별 및 로깅
"""
- def __init__(self, app, service_mappings: Dict[str, Dict] = None):
+ def __init__(
+ self,
+ app,
+ service_mappings: Dict[str, Dict] = None,
+ enable_rds: bool = True,
+ enable_loki: bool = True,
+ ):
"""
:param service_mappings: URL 패턴별 서비스 설정
+ :param enable_rds: RDS 로깅 활성화 여부
+ :param enable_loki: Loki 로깅 활성화 여부
예: {
"/keywords/search": {
"service_type": "NAVER_CRAWLING",
@@ -31,13 +43,23 @@ def __init__(self, app, service_mappings: Dict[str, Dict] = None):
"""
super().__init__(app)
self.service_mappings = service_mappings or self._default_mappings()
+ self.enable_rds = enable_rds
+ self.enable_loki = enable_loki
+
+ # 로거 인스턴스 초기화
+ self.rds_logger = RDSLogger() if enable_rds else None
+ # Loki 직접 로깅 비활성화 - PromTail을 통해 파일로 로깅
+ # self.loki_logger = LokiLogger() if enable_loki else None
+ self.loki_logger = None
def _default_mappings(self) -> Dict[str, Dict]:
"""기본 서비스 매핑 설정"""
return {
+ # 네이버 키워드 검색
"/keywords/search": {
"service_type": "NAVER_CRAWLING",
"track_params": [
+ "tag",
"keyword",
"category",
"startDate",
@@ -45,25 +67,121 @@ def _default_mappings(self) -> Dict[str, Dict]:
"job_id",
"schedule_id",
],
- "response_trackers": ["keyword", "total_keywords", "results_count"],
+ "response_trackers": ["keyword", "total_keyword", "success", "status"],
},
+ # 블로그 RAG 콘텐츠 생성
+ "/blogs/rag/create": {
+ "service_type": "BLOG_RAG_CREATE",
+ "track_params": [
+ "keyword",
+ "product_info",
+ "content_type",
+ "target_length",
+ "job_id",
+ "schedule_id",
+ "schedule_his_id",
+ ],
+ "response_trackers": [
+ "title",
+ "content_length",
+ "tags_count",
+ "success",
+ "status",
+ ],
+ },
+ # 블로그 배포
"/blogs/publish": {
"service_type": "BLOG_PUBLISH",
"track_params": [
"tag",
- "title",
- "content",
- "tags",
+ "blog_id",
+ "post_title",
+ "post_content",
+ "post_tags",
+ "job_id",
+ "schedule_id",
+ "schedule_his_id",
+ ],
+ "response_trackers": [
+ "tag",
+ "post_title",
+ "post_url",
+ "published_at",
+ "publish_success",
+ "metadata",
+ "success",
+ "status",
+ ],
+ },
+ # 상품 검색
+ "/products/search": {
+ "service_type": "PRODUCT_SEARCH",
+ "track_params": [
+ "keyword",
"job_id",
"schedule_id",
"schedule_his_id",
],
"response_trackers": [
+ "keyword",
+ "search_results_count",
+ "success",
+ "status",
+ ],
+ },
+ # 상품 매칭
+ "/products/match": {
+ "service_type": "PRODUCT_MATCH",
+ "track_params": [
+ "keyword",
+ "search_results",
"job_id",
"schedule_id",
"schedule_his_id",
+ ],
+ "response_trackers": [
+ "keyword",
+ "matched_products_count",
+ "success",
+ "status",
+ ],
+ },
+ # 상품 유사도 분석
+ "/products/similarity": {
+ "service_type": "PRODUCT_SIMILARITY",
+ "track_params": [
+ "keyword",
+ "matched_products",
+ "search_results",
+ "job_id",
+ "schedule_id",
+ "schedule_his_id",
+ ],
+ "response_trackers": [
+ "keyword",
+ "selected_product",
+ "reason",
+ "success",
+ "status",
+ ],
+ },
+ # 상품 크롤링
+ "/products/crawl": {
+ "service_type": "PRODUCT_CRAWL",
+ "track_params": [
+ "tag",
+ "product_url",
+ "job_id",
+ "schedule_id",
+ "schedule_his_id",
+ ],
+ "response_trackers": [
+ "tag",
+ "product_url",
+ "product_detail",
+ "crawled_at",
+ "success",
"status",
- "metadata",
],
},
}
@@ -78,7 +196,8 @@ async def dispatch(self, request: Request, call_next):
return await call_next(request)
# 2. 시작 로깅
- trace_id = trace_id_context.get("NO_TRACE_ID")
+ trace_id = request.headers.get("X-Request-ID", "")
+ trace_id_context.set(trace_id)
start_time = time.time()
# 파라미터 추출 및 시작 로그
@@ -91,11 +210,23 @@ async def dispatch(self, request: Request, call_next):
service_type = service_config["service_type"]
logger.info(f"[{service_type}_START] trace_id={trace_id}{param_str}")
+ # source_id 추출 (job_id, schedule_id 등에서)
+ source_id = self._extract_source_id(params)
+ run_id = params.get("run_id")
+
+ # RDS 및 Loki에 시작 로그 전송
+ start_message = f"[{service_type}_START]{param_str}"
+ await self._log_to_external_systems(
+ "start", service_type, source_id, trace_id, start_message, run_id, params
+ )
+
# 3. 요청 처리
try:
response = await call_next(request)
# 4. 성공 로깅
+ duration_ms = int((time.time() - start_time) * 1000)
+
if 200 <= response.status_code < 300:
await self._log_success_response(
service_type,
@@ -105,16 +236,60 @@ async def dispatch(self, request: Request, call_next):
response,
service_config["response_trackers"],
)
+
+ # 외부 로깅 시스템에 성공 로그 전송
+ success_message = f"[{service_type}_SUCCESS]{param_str} status_code={response.status_code}"
+ await self._log_to_external_systems(
+ "success",
+ service_type,
+ source_id,
+ trace_id,
+ success_message,
+ run_id,
+ params,
+ duration_ms=duration_ms,
+ )
else:
await self._log_error_response(
service_type, trace_id, start_time, param_str, response
)
+ # 외부 로깅 시스템에 에러 로그 전송
+ error_message = f"[{service_type}_ERROR]{param_str} status_code={response.status_code}"
+ await self._log_to_external_systems(
+ "error",
+ service_type,
+ source_id,
+ trace_id,
+ error_message,
+ run_id,
+ params,
+ duration_ms=duration_ms,
+ error_code=f"HTTP_{response.status_code}",
+ )
+
return response
except Exception as e:
# 5. 예외 로깅
+ duration_ms = int((time.time() - start_time) * 1000)
await self._log_exception(service_type, trace_id, start_time, param_str, e)
+
+ # 외부 로깅 시스템에 예외 로그 전송
+ exception_message = (
+ f"[{service_type}_EXCEPTION]{param_str} exception={str(e)}"
+ )
+ await self._log_to_external_systems(
+ "error",
+ service_type,
+ source_id,
+ trace_id,
+ exception_message,
+ run_id,
+ params,
+ duration_ms=duration_ms,
+ error_code="EXCEPTION",
+ )
raise
def _get_service_config(self, url_path: str) -> Optional[Dict]:
@@ -248,3 +423,94 @@ async def _log_exception(
f"execution_time={duration:.4f}s{param_str} "
f"exception={str(exception)}"
)
+
+ def _extract_source_id(self, params: Dict[str, Any]) -> int:
+ """파라미터에서 source_id 추출 (job_id, schedule_id 등 우선순위)"""
+ for key in ["job_id", "schedule_id", "task_id", "workflow_id"]:
+ if key in params and params[key]:
+ try:
+ return int(params[key])
+ except (ValueError, TypeError):
+ continue
+ return 0 # 기본값
+
+ async def _log_to_external_systems(
+ self,
+ log_type: str, # start, success, error
+ service_type: str,
+ source_id: int,
+ trace_id: str,
+ message: str,
+ run_id: Optional[int] = None,
+ params: Optional[Dict[str, Any]] = None,
+ duration_ms: Optional[int] = None,
+ error_code: Optional[str] = None,
+ ):
+ """RDS와 Loki에 로그 전송"""
+ tasks = []
+
+ # 로깅할 추가 데이터 준비
+ additional_data = params.copy() if params else {}
+
+ if self.rds_logger:
+ if log_type == "start":
+ task = self.rds_logger.log_start(
+ service_type, source_id, trace_id, message, run_id, additional_data
+ )
+ elif log_type == "success":
+ task = self.rds_logger.log_success(
+ service_type,
+ source_id,
+ trace_id,
+ message,
+ duration_ms,
+ run_id,
+ additional_data,
+ )
+ elif log_type == "error":
+ task = self.rds_logger.log_error(
+ service_type,
+ source_id,
+ trace_id,
+ message,
+ error_code,
+ duration_ms,
+ run_id,
+ additional_data,
+ )
+ tasks.append(task)
+
+ if self.loki_logger:
+ if log_type == "start":
+ task = self.loki_logger.log_start(
+ service_type, source_id, trace_id, message, run_id, additional_data
+ )
+ elif log_type == "success":
+ task = self.loki_logger.log_success(
+ service_type,
+ source_id,
+ trace_id,
+ message,
+ duration_ms,
+ run_id,
+ additional_data,
+ )
+ elif log_type == "error":
+ task = self.loki_logger.log_error(
+ service_type,
+ source_id,
+ trace_id,
+ message,
+ error_code,
+ duration_ms,
+ run_id,
+ additional_data,
+ )
+ tasks.append(task)
+
+ # 비동기로 병렬 실행 (로깅 실패가 메인 로직에 영향을 주지 않도록)
+ if tasks:
+ try:
+ await asyncio.gather(*tasks, return_exceptions=True)
+ except Exception as e:
+ logger.debug(f"외부 로깅 시스템 전송 중 일부 실패: {e}")
diff --git a/apps/pre-processing-service/app/middleware/logging.py b/apps/pre-processing-service/app/middleware/logging.py
index 9a8cb6a0..15bfd757 100644
--- a/apps/pre-processing-service/app/middleware/logging.py
+++ b/apps/pre-processing-service/app/middleware/logging.py
@@ -3,11 +3,24 @@
from loguru import logger
from starlette.middleware.base import BaseHTTPMiddleware
+# trace_id context 변수 import
+try:
+ from app.middleware.ServiceLoggerMiddleware import trace_id_context
+except ImportError:
+ from contextvars import ContextVar
+
+ trace_id_context: ContextVar[str] = ContextVar("trace_id", default="")
+
class LoggingMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next):
start_time = time.time()
+ # trace_id 설정 (X-Request-ID 헤더에서)
+ current_trace_id = request.headers.get("X-Request-ID", "")
+ if current_trace_id:
+ trace_id_context.set(current_trace_id)
+
# 1. 요청 시작 로그
logger.info(
"요청 시작: IP='{}' 메서드='{}' URL='{}'",
diff --git a/apps/pre-processing-service/app/middleware/loki_logger.py b/apps/pre-processing-service/app/middleware/loki_logger.py
new file mode 100644
index 00000000..0a4d603c
--- /dev/null
+++ b/apps/pre-processing-service/app/middleware/loki_logger.py
@@ -0,0 +1,198 @@
+import json
+import aiohttp
+import asyncio
+from typing import Dict, List, Any, Optional
+from datetime import datetime
+from loguru import logger
+
+from app.model.execution_log import ExecutionLog
+from app.core.config import settings
+
+
+class LokiLogger:
+ """Loki에 로그를 전송하는 클래스"""
+
+ def __init__(self):
+ self.loki_url = f"{settings.loki_host}:{settings.loki_port}/loki/api/v1/push"
+ self.app_name = settings.app_name
+ self.session = None
+
+ async def _get_session(self) -> aiohttp.ClientSession:
+ """aiohttp 세션 관리"""
+ if self.session is None or self.session.closed:
+ self.session = aiohttp.ClientSession()
+ return self.session
+
+ async def close(self):
+ """세션 종료"""
+ if self.session and not self.session.closed:
+ await self.session.close()
+
+ async def send_log(
+ self,
+ execution_type: str,
+ source_id: int,
+ log_level: str,
+ log_message: str,
+ trace_id: Optional[str] = None,
+ run_id: Optional[int] = None,
+ status: Optional[str] = None,
+ duration_ms: Optional[int] = None,
+ error_code: Optional[str] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """
+ Loki로 로그 전송
+
+ Args:
+ execution_type: task, schedule, job, workflow
+ source_id: 모든 데이터에 대한 ID
+ log_level: INFO, ERROR, WARNING, DEBUG
+ log_message: 로그 메시지
+ trace_id: 추적 ID
+ run_id: 실행 ID
+ status: SUCCESS, ERROR, RUNNING, PENDING
+ duration_ms: 실행 시간(밀리초)
+ error_code: 에러 코드
+ additional_data: 추가 데이터
+
+ Returns:
+ bool: 전송 성공 여부
+ """
+ try:
+ execution_log = ExecutionLog(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level=log_level,
+ executed_at=datetime.now(),
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status=status,
+ duration_ms=duration_ms,
+ error_code=error_code,
+ reserved4=additional_data,
+ )
+
+ loki_data = execution_log.to_loki_format(self.app_name)
+
+ # Loki push API 형식으로 변환
+ payload = {
+ "streams": [
+ {
+ "stream": loki_data["labels"],
+ "values": [
+ [
+ str(loki_data["log"]["timestamp"]),
+ json.dumps(loki_data["log"], ensure_ascii=False),
+ ]
+ ],
+ }
+ ]
+ }
+
+ session = await self._get_session()
+
+ async with session.post(
+ self.loki_url,
+ json=payload,
+ headers={"Content-Type": "application/json"},
+ timeout=aiohttp.ClientTimeout(total=5),
+ ) as response:
+ if response.status == 204:
+ # logger.debug(f"Loki 로그 전송 성공: {execution_type} - {log_message[:50]}...")
+ return True
+ else:
+ response_text = await response.text()
+ logger.error(
+ f"Loki 로그 전송 실패: status={response.status}, response={response_text}"
+ )
+ return False
+
+ except asyncio.TimeoutError:
+ logger.error("Loki 로그 전송 타임아웃")
+ return False
+ except Exception as e:
+ logger.error(f"Loki 로그 전송 실패: {str(e)}")
+ return False
+
+ async def log_start(
+ self,
+ execution_type: str,
+ source_id: int,
+ trace_id: str,
+ log_message: str,
+ run_id: Optional[int] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """시작 로그 전송"""
+ return await self.send_log(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level="INFO",
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status="RUNNING",
+ additional_data=additional_data,
+ )
+
+ async def log_success(
+ self,
+ execution_type: str,
+ source_id: int,
+ trace_id: str,
+ log_message: str,
+ duration_ms: int,
+ run_id: Optional[int] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """성공 로그 전송"""
+ return await self.send_log(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level="INFO",
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status="SUCCESS",
+ duration_ms=duration_ms,
+ additional_data=additional_data,
+ )
+
+ async def log_error(
+ self,
+ execution_type: str,
+ source_id: int,
+ trace_id: str,
+ log_message: str,
+ error_code: str,
+ duration_ms: Optional[int] = None,
+ run_id: Optional[int] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """에러 로그 전송"""
+ return await self.send_log(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level="ERROR",
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status="ERROR",
+ duration_ms=duration_ms,
+ error_code=error_code,
+ additional_data=additional_data,
+ )
+
+ def __del__(self):
+ """소멸자에서 세션 정리"""
+ if self.session and not self.session.closed:
+ try:
+ loop = asyncio.get_event_loop()
+ if loop.is_running():
+ loop.create_task(self.session.close())
+ else:
+ loop.run_until_complete(self.session.close())
+ except Exception:
+ pass
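For reference, the body `send_log` assembles follows Loki's `/loki/api/v1/push` JSON schema; an illustrative payload (all values made up):

payload = {
    "streams": [
        {
            # Labels come from ExecutionLog.to_loki_format(app_name).
            "stream": {
                "app": "pre-processing-service",
                "env": "develop",
                "traceId": "req-1234",
                "executionType": "PRODUCT_SEARCH",
                "sourceId": "0",
            },
            # One [timestamp_ns, line] pair per entry; the line is the JSON log dict.
            "values": [
                ["1725264000000000000", '{"level": "INFO", "message": "[PRODUCT_SEARCH_START] ..."}']
            ],
        }
    ]
}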
diff --git a/apps/pre-processing-service/app/middleware/rds_logger.py b/apps/pre-processing-service/app/middleware/rds_logger.py
new file mode 100644
index 00000000..66bad19c
--- /dev/null
+++ b/apps/pre-processing-service/app/middleware/rds_logger.py
@@ -0,0 +1,153 @@
+from typing import Optional
+from datetime import datetime
+import traceback
+from loguru import logger
+
+from app.db.mariadb_manager import MariadbManager
+from app.model.execution_log import ExecutionLog
+
+
+class RDSLogger:
+ """RDS(MariaDB)에 로그를 저장하는 클래스"""
+
+ def __init__(self):
+ self.db_manager = MariadbManager()
+
+ async def log_execution(
+ self,
+ execution_type: str,
+ source_id: int,
+ log_level: str,
+ log_message: str,
+ trace_id: Optional[str] = None,
+ run_id: Optional[int] = None,
+ status: Optional[str] = None,
+ duration_ms: Optional[int] = None,
+ error_code: Optional[str] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """
+ execution_log 테이블에 로그 저장
+
+ Args:
+ execution_type: task, schedule, job, workflow
+ source_id: 모든 데이터에 대한 ID
+ log_level: INFO, ERROR, WARNING, DEBUG
+ log_message: 로그 메시지
+ trace_id: 추적 ID
+ run_id: 실행 ID
+ status: SUCCESS, ERROR, RUNNING, PENDING
+ duration_ms: 실행 시간(밀리초)
+ error_code: 에러 코드
+ additional_data: 추가 데이터 (reserved4에 JSON으로 저장)
+
+ Returns:
+ bool: 저장 성공 여부
+ """
+ try:
+ execution_log = ExecutionLog(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level=log_level,
+ executed_at=datetime.now(),
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status=status,
+ duration_ms=duration_ms,
+ error_code=error_code,
+ reserved4=additional_data,
+ )
+
+ log_data = execution_log.to_dict()
+
+ # 컬럼명과 값 분리
+ columns = list(log_data.keys())
+ values = list(log_data.values())
+ placeholders = ", ".join(["%s"] * len(values))
+ columns_str = ", ".join(columns)
+
+ insert_query = f"""
+ INSERT INTO execution_log ({columns_str})
+ VALUES ({placeholders})
+ """
+
+ with self.db_manager.get_cursor() as cursor:
+ cursor.execute(insert_query, values)
+
+ # logger.debug(f"RDS 로그 저장 성공: {execution_type} - {log_message[:50]}...")
+ return True
+
+ except Exception as e:
+ logger.error(f"RDS 로그 저장 실패: {str(e)}")
+ logger.error(f"Traceback: {traceback.format_exc()}")
+ return False
+
+ async def log_start(
+ self,
+ execution_type: str,
+ source_id: int,
+ trace_id: str,
+ log_message: str,
+ run_id: Optional[int] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """시작 로그 저장"""
+ return await self.log_execution(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level="INFO",
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status="RUNNING",
+ additional_data=additional_data,
+ )
+
+ async def log_success(
+ self,
+ execution_type: str,
+ source_id: int,
+ trace_id: str,
+ log_message: str,
+ duration_ms: int,
+ run_id: Optional[int] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """성공 로그 저장"""
+ return await self.log_execution(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level="INFO",
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status="SUCCESS",
+ duration_ms=duration_ms,
+ additional_data=additional_data,
+ )
+
+ async def log_error(
+ self,
+ execution_type: str,
+ source_id: int,
+ trace_id: str,
+ log_message: str,
+ error_code: str,
+ duration_ms: Optional[int] = None,
+ run_id: Optional[int] = None,
+ additional_data: Optional[dict] = None,
+ ) -> bool:
+ """에러 로그 저장"""
+ return await self.log_execution(
+ execution_type=execution_type,
+ source_id=source_id,
+ log_level="ERROR",
+ log_message=log_message,
+ trace_id=trace_id,
+ run_id=run_id,
+ status="ERROR",
+ duration_ms=duration_ms,
+ error_code=error_code,
+ additional_data=additional_data,
+ )
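A usage sketch, assuming reachable MariaDB credentials; `RDSLogger` and `LokiLogger` expose the same `log_start`/`log_success`/`log_error` trio, which is what lets the middleware fan out to either sink:

import asyncio

from app.middleware.rds_logger import RDSLogger


async def main() -> None:
    rds = RDSLogger()
    ok = await rds.log_success(
        execution_type="PRODUCT_SEARCH",
        source_id=1,
        trace_id="req-1234",
        log_message="[PRODUCT_SEARCH_SUCCESS] keyword=아이폰 케이스",
        duration_ms=842,
        additional_data={"keyword": "아이폰 케이스"},
    )
    print("stored:", ok)


asyncio.run(main())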
diff --git a/apps/pre-processing-service/app/model/execution_log.py b/apps/pre-processing-service/app/model/execution_log.py
new file mode 100644
index 00000000..c1bef2e0
--- /dev/null
+++ b/apps/pre-processing-service/app/model/execution_log.py
@@ -0,0 +1,79 @@
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Optional, Dict, Any
+import json
+
+
+@dataclass
+class ExecutionLog:
+ """execution_log 테이블에 대응하는 데이터 모델"""
+
+ execution_type: str # task, schedule, job, workflow
+ source_id: int # 모든 데이터에 대한 ID
+ log_level: str # INFO, ERROR, WARNING, DEBUG
+ executed_at: datetime
+ log_message: str
+ span_id: str = "" # 테스트값
+ trace_id: Optional[str] = None
+ run_id: Optional[int] = None
+ status: Optional[str] = None # SUCCESS, ERROR, RUNNING, PENDING
+ duration_ms: Optional[int] = None
+ error_code: Optional[str] = None
+ reserved1: Optional[str] = None
+ reserved2: Optional[str] = None
+ reserved3: Optional[int] = None
+ reserved4: Optional[Dict[str, Any]] = None # JSON 데이터
+ reserved5: Optional[datetime] = None
+ id: Optional[int] = None # auto_increment
+
+ def to_dict(self) -> Dict[str, Any]:
+ """딕셔너리로 변환 (DB 삽입용)"""
+ data = {
+ "execution_type": self.execution_type,
+ "source_id": self.source_id,
+ "log_level": self.log_level,
+ "executed_at": self.executed_at,
+ "log_message": self.log_message,
+ "trace_id": self.trace_id,
+ "run_id": self.run_id,
+ "status": self.status,
+ "duration_ms": self.duration_ms,
+ "error_code": self.error_code,
+ "reserved1": self.span_id,
+ "reserved2": self.reserved2,
+ "reserved3": self.reserved3,
+ "reserved4": json.dumps(self.reserved4) if self.reserved4 else None,
+ "reserved5": self.reserved5,
+ }
+ return {k: v for k, v in data.items() if v is not None}
+
+ def to_loki_format(
+ self, app_name: str = "pre-processing-service"
+ ) -> Dict[str, Any]:
+ """Loki 형식으로 변환"""
+
+ labels = {
+ "app": app_name,
+ "env": "develop",
+ "traceId": self.trace_id or "",
+ "spanId": self.span_id, # 필요시 추가
+ "executionType": self.execution_type,
+ "sourceId": str(self.source_id),
+ "runId": str(self.run_id) if self.run_id else "",
+ }
+
+ log_data = {
+ "timestamp": int(self.executed_at.timestamp() * 1000000000), # nanoseconds
+ "level": self.log_level,
+ "message": self.log_message,
+ "execution_type": self.execution_type,
+ "source_id": self.source_id,
+ "status": self.status,
+ "duration_ms": self.duration_ms,
+ "error_code": self.error_code,
+ }
+
+ if self.reserved4:
+ log_data.update(self.reserved4)
+
+ return {"labels": labels, "log": log_data}
diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py
index 61720cb6..18d0d99f 100644
--- a/apps/pre-processing-service/app/model/schemas.py
+++ b/apps/pre-processing-service/app/model/schemas.py
@@ -1,70 +1,71 @@
from datetime import datetime
-from typing import Optional, List, Dict, Any
+from typing import Optional, List, Dict, Any, TypeVar, Generic
from pydantic import BaseModel, Field, HttpUrl
+# 제네릭 타입 변수 정의
+T = TypeVar("T")
+
# 기본 요청
class RequestBase(BaseModel):
- job_id: int = Field(
- ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자"
- )
- schedule_id: int = Field(
- ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자"
- )
- schedule_his_id: Optional[int] = Field(
- None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자"
- )
+ pass
# 기본 응답
-class ResponseBase(BaseModel):
- job_id: int = Field(
- ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자"
- )
- schedule_id: int = Field(
- ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자"
- )
- schedule_his_id: Optional[int] = Field(
- None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자"
- )
+class ResponseBase(BaseModel, Generic[T]):
+ success: bool = Field(..., title="성공유무", description="true,false")
+ data: T = Field(..., title="응답 데이터")
status: str = Field(..., title="상태", description="요청 처리 상태")
+ message: str = Field(..., title="메시지", description="메시지입니다.")
+
+
+# ============== 1단계: 네이버 키워드 추출 ==============
-# 네이버 키워드 추출
class RequestNaverSearch(RequestBase):
tag: str = Field(..., title="태그", description="데이터랩/스토어 태그 구분")
- category: Optional[str] = Field(
- None, title="카테고리", description="검색할 카테고리"
- )
- start_date: Optional[str] = Field(
- None, title="시작일", description="검색 시작 날짜 (YYYY-MM-DD)"
- )
- end_date: Optional[str] = Field(
- None, title="종료일", description="검색 종료 날짜 (YYYY-MM-DD)"
- )
-class ResponseNaverSearch(ResponseBase):
- category: Optional[str] = Field(None, title="카테고리", description="검색 카테고리")
+# 응답 데이터 모델
+class NaverSearchData(BaseModel):
keyword: str = Field(..., title="키워드", description="검색에 사용된 키워드")
total_keyword: Dict[int, str] = Field(
..., title="총 키워드", description="키워드별 총 검색 결과"
)
-# 2단계: 검색
+# 최종 응답 모델
+class ResponseNaverSearch(ResponseBase[NaverSearchData]):
+ """네이버 키워드 검색 API 응답"""
+
+ pass
+
+
+# ============== 2단계: 사다구 검색 ==============
+
+
class RequestSadaguSearch(RequestBase):
keyword: str = Field(..., title="검색 키워드", description="상품을 검색할 키워드")
-class ResponseSadaguSearch(ResponseBase):
+# 응답 데이터 모델
+class SadaguSearchData(BaseModel):
keyword: str = Field(..., title="검색 키워드", description="검색에 사용된 키워드")
search_results: List[Dict] = Field(
..., title="검색 결과", description="검색된 상품 목록"
)
-# 3단계: 매칭
+# 최종 응답 모델
+class ResponseSadaguSearch(ResponseBase[SadaguSearchData]):
+ """사다구 상품 검색 API 응답"""
+
+ pass
+
+
+# ============== 3단계: 사다구 매칭 ==============
+
+
class RequestSadaguMatch(RequestBase):
keyword: str = Field(..., title="매칭 키워드", description="상품과 매칭할 키워드")
search_results: List[Dict] = Field(
@@ -72,14 +73,24 @@ class RequestSadaguMatch(RequestBase):
)
-class ResponseSadaguMatch(ResponseBase):
+# 응답 데이터 모델
+class SadaguMatchData(BaseModel):
keyword: str = Field(..., title="매칭 키워드", description="매칭에 사용된 키워드")
matched_products: List[Dict] = Field(
..., title="매칭된 상품", description="키워드와 매칭된 상품 목록"
)
-# 4단계: 유사도
+# 최종 응답 모델
+class ResponseSadaguMatch(ResponseBase[SadaguMatchData]):
+ """사다구 상품 매칭 API 응답"""
+
+ pass
+
+
+# ============== 4단계: 사다구 유사도 ==============
+
+
class RequestSadaguSimilarity(RequestBase):
keyword: str = Field(
..., title="유사도 분석 키워드", description="유사도 분석할 키워드"
@@ -94,7 +105,8 @@ class RequestSadaguSimilarity(RequestBase):
)
-class ResponseSadaguSimilarity(ResponseBase):
+# 응답 데이터 모델
+class SadaguSimilarityData(BaseModel):
keyword: str = Field(
..., title="분석 키워드", description="유사도 분석에 사용된 키워드"
)
@@ -106,7 +118,16 @@ class ResponseSadaguSimilarity(ResponseBase):
)
-# 사다구몰 크롤링
+# 최종 응답 모델
+class ResponseSadaguSimilarity(ResponseBase[SadaguSimilarityData]):
+ """사다구 상품 유사도 분석 API 응답"""
+
+ pass
+
+
+# ============== 사다구몰 크롤링 ==============
+
+
class RequestSadaguCrawl(RequestBase):
tag: str = Field(
...,
@@ -118,7 +139,8 @@ class RequestSadaguCrawl(RequestBase):
)
-class ResponseSadaguCrawl(ResponseBase):
+# 응답 데이터 모델
+class SadaguCrawlData(BaseModel):
tag: str = Field(..., title="크롤링 태그", description="크롤링 유형 태그")
product_url: str = Field(..., title="상품 URL", description="크롤링된 상품 URL")
product_detail: Optional[Dict] = Field(
@@ -129,30 +151,79 @@ class ResponseSadaguCrawl(ResponseBase):
)
-# 블로그 콘텐츠 생성
-class RequestBlogCreate(RequestBase):
+# 최종 응답 모델
+class ResponseSadaguCrawl(ResponseBase[SadaguCrawlData]):
+ """사다구몰 크롤링 API 응답"""
+
pass
-class ResponseBlogCreate(ResponseBase):
+# ============== 블로그 콘텐츠 생성 ==============
+
+
+class RequestBlogCreate(RequestBase):
+ keyword: Optional[str] = Field(
+ None, title="키워드", description="콘텐츠 생성용 키워드"
+ )
+ product_info: Optional[Dict] = Field(
+ None, title="상품 정보", description="블로그 콘텐츠에 포함할 상품 정보"
+ )
+ content_type: Optional[str] = Field(
+ None, title="콘텐츠 타입", description="생성할 콘텐츠 유형"
+ )
+ target_length: Optional[int] = Field(
+ None, title="목표 글자 수", description="생성할 콘텐츠의 목표 길이"
+ )
+
+
+# 응답 데이터 모델
+class BlogCreateData(BaseModel):
+ title: str = Field(..., title="블로그 제목", description="생성된 블로그 제목")
+ content: str = Field(..., title="블로그 내용", description="생성된 블로그 내용")
+ tags: List[str] = Field(
+ default_factory=list, title="추천 태그", description="콘텐츠에 적합한 태그 목록"
+ )
+
+
+# 최종 응답 모델
+class ResponseBlogCreate(ResponseBase[BlogCreateData]):
+ """블로그 콘텐츠 생성 API 응답"""
+
pass
-# 블로그 배포
+# ============== 블로그 배포 ==============
+
+
class RequestBlogPublish(RequestBase):
tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류")
blog_id: str = Field(..., description="블로그 아이디")
blog_pw: str = Field(..., description="블로그 비밀번호")
post_title: str = Field(..., description="포스팅 제목")
post_content: str = Field(..., description="포스팅 내용")
- post_tags: List[str] = Field(default=[], description="포스팅 태그 목록")
+ post_tags: List[str] = Field(default_factory=list, description="포스팅 태그 목록")
-class ResponseBlogPublish(ResponseBase):
- # 디버깅 용
+# 응답 데이터 모델
+class BlogPublishData(BaseModel):
+ tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류")
+ post_title: str = Field(..., title="포스팅 제목", description="배포된 포스팅 제목")
+ post_url: Optional[str] = Field(
+ None, title="포스팅 URL", description="배포된 포스팅 URL"
+ )
+ published_at: Optional[str] = Field(
+ None, title="배포 시간", description="포스팅 배포 완료 시간"
+ )
+ publish_success: bool = Field(..., title="배포 성공 여부")
+
+ # 디버깅 용 (Optional로 변경)
metadata: Optional[Dict[str, Any]] = Field(
None, description="포스팅 관련 메타데이터"
)
- # 프로덕션 용
- # post_url: str = Field(..., description="포스팅 URL")
+
+# 최종 응답 모델
+class ResponseBlogPublish(ResponseBase[BlogPublishData]):
+ """블로그 배포 API 응답"""
+
+ pass
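With the generic base, each response model is the same envelope specialized by its `data` type; a construction sketch with illustrative values:

from app.model.schemas import NaverSearchData, ResponseNaverSearch

resp = ResponseNaverSearch(
    success=True,
    data=NaverSearchData(
        keyword="아이폰 케이스",
        total_keyword={1: "아이폰 케이스", 2: "아이폰 필름"},
    ),
    status="200",
    message="OK",
)
assert resp.data.keyword == "아이폰 케이스"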
diff --git a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py
index ff4b2754..f55bdba0 100644
--- a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py
+++ b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py
@@ -1,5 +1,5 @@
from abc import ABC, abstractmethod
-from typing import Dict, List, Optional
+from typing import Dict, List
from app.utils.crawling_util import CrawlingUtil
from app.errors.BlogPostingException import *
@@ -11,51 +11,39 @@ class BaseBlogPostService(ABC):
블로그 포스팅 서비스 추상 클래스
"""
- def __init__(self, config_file="blog_config.json"):
- """공통 초기화 로직"""
- # Selenium 기반 서비스를 위한 초기화
- if self._requires_webdriver():
+ def __init__(self, use_webdriver=True):
+ """
+ 공통 초기화 로직
+ :param use_webdriver: 웹드라이버 사용 여부 (API 서비스의 경우 False)
+ """
+ self.use_webdriver = use_webdriver
+
+ if self.use_webdriver:
try:
- self.crawling_service = CrawlingUtil()
+ # 블로그 포스팅용 설정으로 초기화
+ self.crawling_service = CrawlingUtil(
+ headless=False, # 네이버 탐지 우회를 위해 headless 비활성화
+ for_blog_posting=True,
+ )
self.web_driver = self.crawling_service.get_driver()
self.wait_driver = self.crawling_service.get_wait()
except Exception:
raise WebDriverConnectionException()
else:
- # API 기반 서비스의 경우 WebDriver가 필요 없음
self.crawling_service = None
self.web_driver = None
self.wait_driver = None
- # API 기반 서비스를 위한 초기화
- self.config_file = config_file
- self.config = {}
- self.current_upload_account = None
-
- # API 관련 속성들 (사용하지 않는 서비스에서는 None으로 유지)
- self.blogger_service = None
- self.blog_id = None
- self.scopes = None
-
self._load_config()
- def _requires_webdriver(self) -> bool:
- """
- 서브클래스에서 WebDriver가 필요한지 여부를 반환
- 기본값은 True (Selenium 기반), API 기반 서비스에서는 False로 오버라이드
- """
- return True
-
@abstractmethod
def _load_config(self) -> None:
"""플랫폼별 설정 로드"""
pass
+ @abstractmethod
def _login(self) -> None:
- """
- 플랫폼별 로그인 구현 (API 기반 서비스의 경우 인증으로 대체)
- 기본 구현은 아무것도 하지 않음 (API 서비스용)
- """
+ """플랫폼별 로그인 구현"""
pass
@abstractmethod
@@ -83,6 +71,14 @@ def _validate_content(
:param content: 포스트 내용
:param tags: 포스트 태그 리스트
"""
+ # if not title or not title.strip():
+ # raise BlogContentValidationException("title", "제목이 비어있습니다")
+ #
+ # if not content or not content.strip():
+ # raise BlogContentValidationException("content", "내용이 비어있습니다")
+ #
+ # if tags is None:
+ # raise BlogContentValidationException("tags", "태그가 비어있습니다")
pass
def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict:
@@ -96,7 +92,7 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict
# 1. 콘텐츠 유효성 검사
self._validate_content(title, content, tags)
- # 2. 로그인 (Selenium 기반) 또는 인증 (API 기반)
+ # 2. 로그인
self._login()
# 3. 포스트 작성 및 발행
diff --git a/apps/pre-processing-service/app/service/blog/blog_create_service.py b/apps/pre-processing-service/app/service/blog/blog_create_service.py
new file mode 100644
index 00000000..29ce12b7
--- /dev/null
+++ b/apps/pre-processing-service/app/service/blog/blog_create_service.py
@@ -0,0 +1,345 @@
+import json
+import logging
+import os
+from datetime import datetime
+from typing import Dict, List, Optional, Any
+
+from openai import OpenAI
+from dotenv import load_dotenv
+
+from app.model.schemas import RequestBlogCreate
+from app.errors.BlogPostingException import *
+
+# 환경변수 로드
+load_dotenv(".env.dev")
+
+
+class BlogContentService:
+ """RAG를 사용한 블로그 콘텐츠 생성 전용 서비스"""
+
+ def __init__(self):
+ # OpenAI API 키 설정
+ self.openai_api_key = os.getenv("OPENAI_API_KEY")
+ if not self.openai_api_key:
+ raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.")
+
+ # 인스턴스 레벨에서 클라이언트 생성
+ self.client = OpenAI(api_key=self.openai_api_key)
+ logging.basicConfig(level=logging.INFO)
+ self.logger = logging.getLogger(__name__)
+
+ def generate_blog_content(self, request: RequestBlogCreate) -> Dict[str, Any]:
+ """
+ 요청 데이터를 기반으로 블로그 콘텐츠 생성
+
+ Args:
+ request: RequestBlogCreate 객체
+
+ Returns:
+ Dict: {"title": str, "content": str, "tags": List[str]} 형태의 결과
+ """
+ try:
+ # 1. 콘텐츠 정보 정리
+ content_context = self._prepare_content_context(request)
+
+ # 2. 프롬프트 생성
+ prompt = self._create_content_prompt(content_context, request)
+
+ # 3. GPT를 통한 콘텐츠 생성
+ generated_content = self._generate_with_openai(prompt)
+
+ # 4. 콘텐츠 파싱 및 구조화
+ return self._parse_generated_content(generated_content, request)
+
+ except Exception as e:
+ self.logger.error(f"콘텐츠 생성 실패: {e}")
+ return self._create_fallback_content(request)
+
+ def _prepare_content_context(self, request: RequestBlogCreate) -> str:
+ """요청 데이터를 콘텐츠 생성용 컨텍스트로 변환"""
+ context_parts = []
+
+ # 키워드 정보 추가
+ if request.keyword:
+ context_parts.append(f"주요 키워드: {request.keyword}")
+
+ # 상품 정보 추가
+ if request.product_info:
+ context_parts.append("\n상품 정보:")
+
+ # 상품 기본 정보
+ if request.product_info.get("title"):
+ context_parts.append(f"- 상품명: {request.product_info['title']}")
+
+ if request.product_info.get("price"):
+ context_parts.append(f"- 가격: {request.product_info['price']:,}원")
+
+ if request.product_info.get("rating"):
+ context_parts.append(f"- 평점: {request.product_info['rating']}/5.0")
+
+ # 상품 상세 정보
+ if request.product_info.get("description"):
+ context_parts.append(f"- 설명: {request.product_info['description']}")
+
+ # 상품 사양 (material_info 등)
+ if request.product_info.get("material_info"):
+ context_parts.append("- 주요 사양:")
+ specs = request.product_info["material_info"]
+ if isinstance(specs, dict):
+ for key, value in specs.items():
+ context_parts.append(f" * {key}: {value}")
+
+ # 상품 옵션
+ if request.product_info.get("options"):
+ options = request.product_info["options"]
+ context_parts.append(f"- 구매 옵션 ({len(options)}개):")
+ for i, option in enumerate(options[:5], 1): # 최대 5개만
+ if isinstance(option, dict):
+ option_name = option.get("name", f"옵션 {i}")
+ context_parts.append(f" {i}. {option_name}")
+ else:
+ context_parts.append(f" {i}. {option}")
+
+ # 구매 링크
+ if request.product_info.get("url") or request.product_info.get(
+ "product_url"
+ ):
+ url = request.product_info.get("url") or request.product_info.get(
+ "product_url"
+ )
+ context_parts.append(f"- 구매 링크: {url}")
+
+ return "\n".join(context_parts) if context_parts else "키워드 기반 콘텐츠 생성"
+
+ def _create_content_prompt(self, context: str, request: RequestBlogCreate) -> str:
+ """콘텐츠 생성용 프롬프트 생성"""
+
+ # 기본 키워드가 없으면 상품 제목에서 추출
+ main_keyword = request.keyword
+ if (
+ not main_keyword
+ and request.product_info
+ and request.product_info.get("title")
+ ):
+ main_keyword = request.product_info["title"]
+
+ prompt = f"""
+다음 정보를 바탕으로 매력적인 블로그 포스트를 작성해주세요.
+
+정보:
+{context}
+
+작성 가이드라인:
+- 스타일: 친근하면서도 신뢰할 수 있는, 정보 제공 중심
+- 길이: 1200자 내외의 적당한 길이
+- 톤: 독자의 관심을 끄는 자연스러운 어조
+
+작성 요구사항:
+1. SEO 친화적이고 클릭하고 싶은 매력적인 제목
+2. 독자의 관심을 끄는 도입부
+3. 핵심 특징과 장점을 구체적으로 설명
+4. 실제 사용 시나리오나 활용 팁
+5. 구매 결정에 도움이 되는 정보
+
+⚠️ 주의:
+- 절대로 마지막에 'HTML 구조는…' 같은 자기 평가 문장을 추가하지 마세요.
+- 출력 시 ```나 ```html 같은 코드 블록 구문을 포함하지 마세요.
+- 오직 HTML 태그만 사용하여 구조화된 콘텐츠를 작성해주세요.
+(예: <h1>, <h2>, <p>, <ul>, <li> 등)
+"""
+
+ return prompt
+
+ def _generate_with_openai(self, prompt: str) -> str:
+ """OpenAI API를 통한 콘텐츠 생성"""
+ try:
+ response = self.client.chat.completions.create(
+ model="gpt-4o-mini",
+ messages=[
+ {
+ "role": "system",
+ "content": "당신은 전문적인 블로그 콘텐츠 작성자입니다. 상품 리뷰와 정보성 콘텐츠를 매력적이고 SEO 친화적으로 작성합니다.",
+ },
+ {"role": "user", "content": prompt},
+ ],
+ temperature=0.7,
+ max_tokens=2000,
+ )
+
+ return response.choices[0].message.content
+
+ except Exception as e:
+ self.logger.error(f"OpenAI API 호출 실패: {e}")
+ raise
+
+ def _parse_generated_content(
+ self, content: str, request: RequestBlogCreate
+ ) -> Dict[str, Any]:
+ """생성된 콘텐츠를 파싱하여 구조화"""
+
+ # 제목 추출 (첫 번째 h1이나 강조된 줄)
+ lines = content.strip().split("\n")
+ title = "블로그 포스트" # 기본값
+
+ for line in lines[:10]: # 처음 10줄에서 제목 찾기
+ clean_line = (
+ line.strip()
+ .replace("#", "")
+ .replace("<h1>", "")
+ .replace("</h1>", "")
+ .replace("<strong>", "")
+ .replace("</strong>", "")
+ )
+ if clean_line and len(clean_line) > 5 and len(clean_line) < 100:
+ title = clean_line
+ break
+
+ # 키워드가 있으면 제목에 없을 경우 기본 제목 생성
+ if request.keyword and request.keyword not in title:
+ if request.product_info and request.product_info.get("title"):
+ title = (
+ f"{request.product_info['title']} - {request.keyword} 완벽 가이드"
+ )
+ else:
+ title = f"{request.keyword} - 완벽 가이드"
+
+ # 태그 생성
+ tags = self._generate_tags(request)
+
+ return {"title": title, "content": content, "tags": tags}
+
+ def _generate_tags(self, request: RequestBlogCreate) -> List[str]:
+ """요청 정보 기반 태그 생성"""
+ tags = []
+
+ # 키워드 추가
+ if request.keyword:
+ tags.append(request.keyword)
+
+ # 상품 정보에서 태그 추출
+ if request.product_info:
+ # 상품명에서 키워드 추출
+ if request.product_info.get("title"):
+ title = request.product_info["title"].lower()
+
+ # 일반적인 제품 카테고리 태그
+ if any(word in title for word in ["iphone", "아이폰", "phone"]):
+ tags.extend(["아이폰", "스마트폰"])
+ if any(word in title for word in ["필름", "보호", "강화"]):
+ tags.extend(["보호필름", "강화필름"])
+ if any(word in title for word in ["케이스", "커버"]):
+ tags.extend(["폰케이스", "액세서리"])
+ if any(word in title for word in ["노트북", "laptop"]):
+ tags.extend(["노트북", "컴퓨터"])
+ if any(word in title for word in ["마우스", "키보드"]):
+ tags.extend(["컴퓨터용품", "PC액세서리"])
+
+ # 재료/사양 정보에서 태그 생성
+ if request.product_info.get("material_info"):
+ material_info = request.product_info["material_info"]
+ if isinstance(material_info, dict):
+ for key, value in material_info.items():
+ if value and len(str(value).strip()) <= 20:
+ clean_value = str(value).strip()
+ if clean_value not in tags:
+ tags.append(clean_value)
+
+ # 기본 태그 추가
+ if not tags:
+ tags = ["상품정보", "리뷰"]
+
+ # 중복 제거 및 개수 제한
+ unique_tags = []
+ for tag in tags:
+ if tag not in unique_tags and len(unique_tags) < 10:
+ unique_tags.append(tag)
+
+ return unique_tags
+
+ def _create_fallback_content(self, request: RequestBlogCreate) -> Dict[str, Any]:
+ """콘텐츠 생성 실패 시 대안 콘텐츠 생성"""
+
+ if request.product_info and request.product_info.get("title"):
+ title = f"{request.product_info['title']} - 상품 정보 및 구매 가이드"
+ product_name = request.product_info["title"]
+ elif request.keyword:
+ title = f"{request.keyword} - 완벽 가이드"
+ product_name = request.keyword
+ else:
+ title = "상품 정보 및 구매 가이드"
+ product_name = "상품"
+
+ content = f"""
+{title}
+
+상품 소개
+{product_name}에 대한 상세한 정보를 소개합니다.
+
+주요 특징
+
+고품질의 제품으로 신뢰할 수 있는 브랜드입니다
+합리적인 가격으로 가성비가 뛰어납니다
+사용자 친화적인 디자인과 기능을 제공합니다
+
+"""
+
+ if request.product_info:
+ if request.product_info.get("price"):
+ content += f"가격 정보 \n판매가: {request.product_info['price']:,}원
\n"
+
+ if request.product_info.get("material_info"):
+ content += "상품 사양 \n\n"
+ for key, value in request.product_info["material_info"].items():
+ content += f"{key}: {value} \n"
+ content += " \n"
+
+ content += """
+구매 안내
+신중한 검토를 통해 만족스러운 구매 결정을 내리시기 바랍니다.
+"""
+
+ return {
+ "title": title,
+ "content": content,
+ "tags": self._generate_tags(request),
+ }
+
+
+# if __name__ == '__main__':
+# # 테스트용 요청 데이터
+# test_request = RequestBlogCreate(
+# keyword="아이폰 케이스",
+# product_info={
+# "title": "아이폰 15 프로 투명 케이스",
+# "price": 29900,
+# "rating": 4.8,
+# "description": "9H 강화 보호 기능을 제공하는 투명 케이스",
+# "material_info": {
+# "소재": "TPU + PC",
+# "두께": "1.2mm",
+# "색상": "투명",
+# "호환성": "아이폰 15 Pro"
+# },
+# "options": [
+# {"name": "투명"},
+# {"name": "반투명"},
+# {"name": "블랙"}
+# ],
+# "url": "https://example.com/iphone-case"
+# }
+# )
+#
+# # 서비스 실행
+# service = BlogContentService()
+# print("=== 블로그 콘텐츠 생성 테스트 ===")
+# print(f"키워드: {test_request.keyword}")
+# print(f"상품: {test_request.product_info['title']}")
+# print("\n--- 생성 시작 ---")
+#
+# result = service.generate_blog_content(test_request)
+#
+# print(f"\n=== 생성 결과 ===")
+# print(f"제목: {result['title']}")
+# print(f"\n태그: {', '.join(result['tags'])}")
+# print(f"\n내용:\n{result['content']}")
+# print(f"\n글자수: {len(result['content'])}자")
diff --git a/apps/pre-processing-service/app/service/blog/blog_publish_service.py b/apps/pre-processing-service/app/service/blog/blog_publish_service.py
new file mode 100644
index 00000000..56ad9f06
--- /dev/null
+++ b/apps/pre-processing-service/app/service/blog/blog_publish_service.py
@@ -0,0 +1,42 @@
+from typing import Dict
+from app.errors.CustomException import CustomException
+from app.model.schemas import RequestBlogPublish
+from app.service.blog.blog_service_factory import BlogServiceFactory
+
+
+class BlogPublishService:
+ """블로그 발행을 담당하는 서비스 클래스"""
+
+ def __init__(self):
+ self.factory = BlogServiceFactory()
+
+ def publish_content(self, request: RequestBlogPublish) -> Dict:
+ """
+ 생성된 블로그 콘텐츠를 배포합니다.
+ """
+ try:
+ # 팩토리를 통해 적절한 서비스 생성
+ blog_service = self.factory.create_service(request.tag)
+
+ # 공통 인터페이스로 포스팅 실행
+ response_data = blog_service.post_content(
+ title=request.post_title,
+ content=request.post_content,
+ tags=request.post_tags,
+ )
+
+ if not response_data:
+ raise CustomException(
+ f"{request.tag} 블로그 포스팅에 실패했습니다.", status_code=500
+ )
+
+ return response_data
+
+ except CustomException:
+ # 이미 처리된 예외는 그대로 전달
+ raise
+ except Exception as e:
+ # 예상치 못한 예외 처리
+ raise CustomException(
+ f"블로그 포스팅 중 오류가 발생했습니다: {str(e)}", status_code=500
+ )
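
`publish_content` centralizes platform dispatch (via the factory below) and error normalization, so the endpoint layer no longer branches on `request.tag`. A minimal usage sketch, assuming `RequestBlogPublish` exposes the `tag`/`post_title`/`post_content`/`post_tags` fields used above; the field values are placeholders:

```python
from app.model.schemas import RequestBlogPublish
from app.service.blog.blog_publish_service import BlogPublishService

# Illustrative payload only.
request = RequestBlogPublish(
    tag="tistory",
    post_title="Sample title",
    post_content="Sample body",
    post_tags=["sample", "demo"],
)
result = BlogPublishService().publish_content(request)  # dict returned by post_content()
```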
diff --git a/apps/pre-processing-service/app/service/blog/blog_service_factory.py b/apps/pre-processing-service/app/service/blog/blog_service_factory.py
new file mode 100644
index 00000000..b6bc6883
--- /dev/null
+++ b/apps/pre-processing-service/app/service/blog/blog_service_factory.py
@@ -0,0 +1,38 @@
+from typing import Dict, Type
+from app.service.blog.base_blog_post_service import BaseBlogPostService
+from app.service.blog.naver_blog_post_service import NaverBlogPostService
+from app.service.blog.tistory_blog_post_service import TistoryBlogPostService
+from app.service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter
+from app.errors.CustomException import CustomException
+
+
+class BlogServiceFactory:
+ """블로그 서비스 객체 생성을 담당하는 팩토리"""
+
+ # 서비스 타입별 클래스 매핑
+ _services: Dict[str, Type[BaseBlogPostService]] = {
+ "naver": NaverBlogPostService,
+ "tistory": TistoryBlogPostService,
+ "blogger": BloggerBlogPostAdapter,
+ }
+
+ @classmethod
+ def create_service(cls, platform: str) -> BaseBlogPostService:
+ """
+ 플랫폼에 따른 블로그 서비스 인스턴스 생성
+ """
+ service_class = cls._services.get(platform.lower())
+
+ if not service_class:
+ raise CustomException(
+ f"지원하지 않는 플랫폼입니다: {platform}. "
+ f"지원 플랫폼: {list(cls._services.keys())}",
+ status_code=400,
+ )
+
+ return service_class()
+
+ @classmethod
+ def get_supported_platforms(cls) -> list:
+ """지원하는 플랫폼 목록 반환"""
+ return list(cls._services.keys())
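
The registry above makes adding a platform a one-line change and rejects unknown tags with a 400. Expected behavior, inferred from the mapping:

```python
from app.service.blog.blog_service_factory import BlogServiceFactory
from app.errors.CustomException import CustomException

service = BlogServiceFactory.create_service("naver")      # NaverBlogPostService instance
platforms = BlogServiceFactory.get_supported_platforms()  # ['naver', 'tistory', 'blogger']

try:
    BlogServiceFactory.create_service("medium")  # unknown platform
except CustomException as exc:
    print(exc)  # "지원하지 않는 플랫폼입니다: medium. ..."
```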
diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py
new file mode 100644
index 00000000..717a102e
--- /dev/null
+++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py
@@ -0,0 +1,84 @@
+from typing import Dict, List, Optional
+from app.service.blog.base_blog_post_service import BaseBlogPostService
+from app.service.blog.blogger_blog_post_service import BloggerApiService
+from app.errors.BlogPostingException import *
+
+
+class BloggerBlogPostAdapter(BaseBlogPostService):
+ """
+ BaseBlogPostService와 호환되도록 BloggerApiService를 감싼 어댑터
+ 현재 BaseBlogPostService 인터페이스와 호환
+ """
+
+ def __init__(self, config_file="blog_config.json"):
+ # API 전용 서비스 (Adaptee) 먼저 초기화
+ self.api_service = BloggerApiService(config_file=config_file)
+
+ try:
+ # 부모 클래스의 웹드라이버 초기화를 시도하지만, 실패해도 무시
+ # 이렇게 하면 부모의 다른 초기화 로직은 실행됨
+ super().__init__()
+ except Exception:
+ # 웹드라이버 초기화 실패 시 API 서비스용으로 속성 설정
+ self.crawling_service = None
+ self.web_driver = None
+ self.wait_driver = None
+ # 설정 로드는 직접 호출
+ self._load_config()
+
+ def _load_config(self) -> None:
+ """
+ BloggerApiService 내부에서 이미 처리되므로 별도 구현 불필요
+ """
+ # API 서비스의 설정이 이미 로드되었으므로 추가 작업 없음
+ pass
+
+ def _login(self) -> None:
+ """
+ Selenium 로그인과 달리, OAuth 인증으로 대체
+ """
+ try:
+ self.api_service.authenticate_with_google_oauth()
+ except Exception as e:
+ raise BlogLoginException("Blogger", f"OAuth 인증 실패: {str(e)}")
+
+ def _write_content(self, title: str, content: str, tags: List[str] = None) -> None:
+ """
+ API를 통한 포스트 작성
+ """
+ try:
+ result = self.api_service.create_post_via_api(title, content, labels=tags)
+ # 결과 로깅
+ print(f"포스트 생성 완료: {result.get('published_url', 'URL 없음')}")
+ except Exception as e:
+ raise BlogPostPublishException("Blogger", f"포스트 작성 실패: {str(e)}")
+
+ def _get_platform_name(self) -> str:
+ """플랫폼 이름 반환"""
+ return "Blogger"
+
+ def _validate_content(
+ self, title: str, content: str, tags: Optional[List[str]] = None
+ ) -> None:
+ """
+ API 전용 유효성 검사 호출
+ """
+ try:
+ # Optional을 List로 변환 (None인 경우 빈 리스트)
+ tags_list = tags if tags is not None else []
+ self.api_service.validate_api_content(title, content, labels=tags_list)
+ except Exception as e:
+ # BloggerApiService의 예외를 BaseBlogPostService 호환 예외로 변환
+ if "title" in str(e).lower():
+ raise BlogContentValidationException("title", str(e))
+ elif "content" in str(e).lower():
+ raise BlogContentValidationException("content", str(e))
+ else:
+ raise BlogContentValidationException("general", str(e))
+
+ def __del__(self):
+ """
+ API 서비스이므로 웹드라이버 정리가 불필요
+ """
+ # 웹드라이버가 없으므로 정리할 것이 없음
+ pass
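
The adapter bridges the REST-only `BloggerApiService` into the Selenium-oriented hook methods. A sketch of the presumed call flow; `BaseBlogPostService` is not part of this diff, so treating `post_content` as its template method is an assumption:

```python
from app.service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter

adapter = BloggerBlogPostAdapter()
# Assumed hook order driven by the base class's post_content():
#   _validate_content -> _login (Google OAuth) -> _write_content (Blogger REST API)
adapter.post_content(title="Hello", content="<p>Hi</p>", tags=["test"])
```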
diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py
index 07e337d9..8bdeb221 100644
--- a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py
+++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py
@@ -1,39 +1,32 @@
import json
import os
import pickle
-from typing import Dict, List, Optional
-
from googleapiclient.discovery import build
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow
-
from app.errors.BlogPostingException import *
-from app.service.blog.base_blog_post_service import BaseBlogPostService
+from typing import Dict, List
-class BloggerBlogPostService(BaseBlogPostService):
+class BloggerApiService:
"""
- Blogger API를 사용하여 포스팅을 관리하는 서비스
+ 호환되지 않는 Blogger API 서비스 (Adaptee)
+ 완전히 다른 초기화/인증 방식을 사용
"""
def __init__(self, config_file="blog_config.json"):
- # 부모 클래스 생성자 호출 (WebDriver는 None으로 설정됨)
- super().__init__()
-
- # API 관련 추가 초기화
self.config_file = config_file
+ self.config = {}
+ self.current_upload_account = None
self.blogger_service = None
self.blog_id = None
self.scopes = ["https://www.googleapis.com/auth/blogger"]
+ self.authenticated = False
- def _requires_webdriver(self) -> bool:
- """API 기반 서비스는 WebDriver가 필요하지 않음"""
- return False
+ self._load_api_config()
- def _load_config(self) -> None:
- """
- 플랫폼별 설정 로드
- """
+ def _load_api_config(self) -> None:
+ """API 전용 설정 로드"""
try:
with open(self.config_file, "r", encoding="utf-8") as f:
self.config = json.load(f)
@@ -48,16 +41,11 @@ def _load_config(self) -> None:
self.config = default_config
self.current_upload_account = self.config["upload_account"]
- def _login(self) -> None:
- """
- API 인증 (Selenium의 로그인을 대체)
- """
- self._authenticate_api()
+ def authenticate_with_google_oauth(self) -> bool:
+ """Google OAuth 인증 (Selenium 로그인과 완전히 다름)"""
+ if self.authenticated:
+ return True
- def _authenticate_api(self):
- """
- API 인증 및 서비스 객체 생성
- """
token_file = f"token_{self.current_upload_account.replace('@', '_').replace('.', '_')}.pkl"
try:
@@ -85,22 +73,22 @@ def _authenticate_api(self):
if blogs.get("items"):
self.blog_id = blogs["items"][0]["id"]
print(f"API 설정 완료 - 블로그: {blogs['items'][0]['name']}")
+ self.authenticated = True
return True
else:
- print("블로그를 찾을 수 없습니다.")
- return False
+ raise BloggerApiException("블로그를 찾을 수 없습니다")
+
except Exception as e:
- print(f"API 인증/설정 실패: {e}")
raise BloggerApiException("API 인증 실패", e)
- def _write_content(self, title: str, content: str, tags: List[str] = None) -> None:
- """
- API를 사용하여 포스팅 작성
- """
- if not self.blogger_service or not self.blog_id:
- self._authenticate_api()
+ def create_post_via_api(
+ self, title: str, content: str, labels: List[str] = None
+ ) -> Dict:
+ """API를 통한 포스트 생성 (Selenium write_content와 완전히 다름)"""
+ if not self.authenticated:
+ self.authenticate_with_google_oauth()
- post_data = {"title": title, "content": content, "labels": tags or []}
+ post_data = {"title": title, "content": content, "labels": labels or []}
try:
result = (
@@ -109,35 +97,22 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No
.execute()
)
- print(f"포스트 생성 완료: {result.get('url')}")
+ return {
+ "blogger_post_id": result.get("id"),
+ "published_url": result.get("url"),
+ "status": "published",
+ }
except Exception as e:
raise BlogPostPublishException(
platform="Blogger", reason="API 통신 중 오류가 발생했습니다."
) from e
- def _get_platform_name(self) -> str:
- """플랫폼 이름 반환"""
- return "Blogger"
-
- def _validate_content(
- self, title: str, content: str, tags: Optional[List[str]] = None
+ def validate_api_content(
+ self, title: str, content: str, labels: List[str] = None
) -> None:
- """
- 공통 유효성 검사 로직
- """
+ """API 전용 유효성 검사"""
if not title or not title.strip():
raise BlogContentValidationException("title", "제목이 비어있습니다")
-
if not content or not content.strip():
raise BlogContentValidationException("content", "내용이 비어있습니다")
-
- # 태그 유효성 검사도 필요에 따라 추가
- # if not tags or not isinstance(tags, list):
- # raise BlogContentValidationException("tags", "태그는 리스트 형태여야 합니다")
-
- def __del__(self):
- """
- 리소스 정리 - API 기반 서비스는 별도 정리 불필요
- 부모 클래스의 __del__이 WebDriver 정리를 처리
- """
- super().__del__()
+ # Blogger는 태그가 선택사항
diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py
index 4122bb2e..f54d4db7 100644
--- a/apps/pre-processing-service/app/service/crawl_service.py
+++ b/apps/pre-processing-service/app/service/crawl_service.py
@@ -1,8 +1,9 @@
import time
-from app.utils.crawler_utils import DetailCrawler
+from app.service.crawlers.detail_crawler import DetailCrawler
from app.errors.CustomException import InvalidItemDataException
from app.model.schemas import RequestSadaguCrawl
from loguru import logger
+from app.utils.response import Response
class CrawlService:
@@ -18,7 +19,7 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict:
try:
logger.info(
- f"상품 상세 크롤링 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, product_url={request.product_url}"
+ f"상품 상세 크롤링 서비스 시작: product_url={request.product_url}"
)
# 상세 정보 크롤링 실행
@@ -36,25 +37,19 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict:
)
# 응답 데이터 구성
- response_data = {
- "job_id": request.job_id,
- "schedule_id": request.schedule_id,
- "schedule_his_id": request.schedule_his_id,
+ data = {
"tag": request.tag,
"product_url": str(request.product_url),
"product_detail": product_detail,
- "status": "success",
"crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"),
}
- logger.info(
- f"상품 상세 크롤링 서비스 완료: job_id={request.job_id}, status=success"
- )
- return response_data
+ logger.info(f"상품 상세 크롤링 서비스 완료: status=success")
+ return Response.ok(data)
except Exception as e:
logger.error(
- f"크롤링 서비스 오류: job_id={request.job_id}, product_url={request.product_url}, error='{e}'"
+ f"크롤링 서비스 오류: product_url={request.product_url}, error='{e}'"
)
raise InvalidItemDataException()
finally:
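
The services in this diff now return `Response.ok(data)` or `Response.ok(data, message)` instead of hand-built dicts carrying `job_id`/`status`. `app/utils/response.py` itself is not included here, so the sketch below only infers the envelope from the call sites; the exact field names are assumptions:

```python
from typing import Any, Optional


class Response:
    """Assumed shape of the shared success envelope."""

    @staticmethod
    def ok(data: Any, message: Optional[str] = None) -> dict:
        # Inferred from call sites: a uniform success wrapper that replaces
        # the per-service "status": "success" fields removed in this diff.
        return {"status": "success", "message": message or "OK", "data": data}
```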
diff --git a/apps/pre-processing-service/app/service/crawlers/__init__.py b/apps/pre-processing-service/app/service/crawlers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/apps/pre-processing-service/app/service/crawlers/base_crawler.py b/apps/pre-processing-service/app/service/crawlers/base_crawler.py
new file mode 100644
index 00000000..27934ab5
--- /dev/null
+++ b/apps/pre-processing-service/app/service/crawlers/base_crawler.py
@@ -0,0 +1,56 @@
+import httpx
+import time
+from abc import ABC, abstractmethod
+from bs4 import BeautifulSoup
+from loguru import logger
+from app.utils.crawling_util import CrawlingUtil
+
+
+class BaseCrawler(ABC):
+ """크롤러 기본 클래스"""
+
+ def __init__(self, use_selenium: bool = True, headless: bool = True):
+ self.base_url = "https://ssadagu.kr"
+ self.use_selenium = use_selenium
+
+ if use_selenium:
+ self._setup_selenium(headless)
+ else:
+ self._setup_httpx()
+
+ def _setup_selenium(self, headless: bool):
+ """Selenium WebDriver 초기화"""
+ try:
+ self.crawling_util = CrawlingUtil(headless=headless)
+ self.driver = self.crawling_util.get_driver()
+ self.wait = self.crawling_util.get_wait()
+ logger.info("Selenium WebDriver 초기화 완료")
+ except Exception as e:
+ logger.warning(f"Selenium 초기화 실패, httpx로 대체: {e}")
+ self.use_selenium = False
+ self._setup_httpx()
+
+ def _setup_httpx(self):
+ """httpx 클라이언트 초기화"""
+ self.client = httpx.AsyncClient(
+ headers={
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+ },
+ timeout=30.0,
+ )
+ logger.info("httpx 클라이언트 초기화 완료")
+
+ async def close(self):
+ """리소스 정리"""
+ if self.use_selenium and hasattr(self, "crawling_util"):
+ try:
+ self.crawling_util.close()
+ logger.info("Selenium WebDriver 종료 완료")
+ except Exception as e:
+ logger.warning(f"Selenium WebDriver 종료 중 오류: {e}")
+ elif hasattr(self, "client"):
+ try:
+ await self.client.aclose()
+ logger.info("httpx 클라이언트 종료 완료")
+ except Exception as e:
+ logger.warning(f"httpx 클라이언트 종료 중 오류: {e}")
diff --git a/apps/pre-processing-service/app/utils/crawler_utils.py b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py
similarity index 55%
rename from apps/pre-processing-service/app/utils/crawler_utils.py
rename to apps/pre-processing-service/app/service/crawlers/detail_crawler.py
index 5c593b9f..885fd2f0 100644
--- a/apps/pre-processing-service/app/utils/crawler_utils.py
+++ b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py
@@ -1,185 +1,10 @@
-import urllib.parse
-import httpx
-import re
import time
+import re
from bs4 import BeautifulSoup
-from selenium import webdriver
-from selenium.webdriver.chrome.options import Options
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support.ui import WebDriverWait
-from selenium.common.exceptions import TimeoutException, NoSuchElementException
+from .search_crawler import SearchCrawler
from loguru import logger
-class SearchCrawler:
- def __init__(self, use_selenium=True):
- self.base_url = "https://ssadagu.kr"
- self.use_selenium = use_selenium
-
- if use_selenium:
- self._setup_selenium()
- else:
- self._setup_httpx()
-
- def _setup_selenium(self):
- """Selenium WebDriver 초기화"""
- chrome_options = Options()
- chrome_options.add_argument("--headless")
- chrome_options.add_argument("--no-sandbox")
- chrome_options.add_argument("--disable-dev-shm-usage")
- chrome_options.add_argument("--disable-gpu")
- chrome_options.add_argument("--window-size=1920,1080")
- chrome_options.add_argument(
- "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
- )
-
- try:
- self.driver = webdriver.Chrome(options=chrome_options)
- self.wait = WebDriverWait(self.driver, 10)
- logger.info("Selenium WebDriver 초기화 완료")
- except Exception as e:
- logger.warning(f"Selenium 초기화 실패, httpx로 대체: {e}")
- self.use_selenium = False
- self._setup_httpx()
-
- def _setup_httpx(self):
- """httpx 클라이언트 초기화"""
- self.client = httpx.AsyncClient(
- headers={
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
- },
- timeout=30.0,
- )
- logger.info("httpx 클라이언트 초기화 완료")
-
- async def search_products_selenium(self, keyword: str) -> list[dict]:
- """Selenium을 사용한 상품 검색"""
- encoded_keyword = urllib.parse.quote(keyword)
- search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}"
-
- try:
- logger.info(
- f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'"
- )
- self.driver.get(search_url)
- time.sleep(5)
-
- product_links = []
- link_elements = self.driver.find_elements(By.TAG_NAME, "a")
-
- for element in link_elements:
- href = element.get_attribute("href")
- if (
- href
- and "view.php" in href
- and ("platform=1688" in href or "num_iid" in href)
- ):
- try:
- title = element.get_attribute("title") or element.text.strip()
- if title:
- product_links.append({"url": href, "title": title})
- except:
- product_links.append({"url": href, "title": "Unknown Title"})
-
- # 중복 제거
- seen_urls = set()
- unique_products = []
- for product in product_links:
- if product["url"] not in seen_urls:
- seen_urls.add(product["url"])
- unique_products.append(product)
-
- logger.info(
- f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)"
- )
- return unique_products[:20]
-
- except Exception as e:
- logger.error(f"Selenium 검색 오류: keyword='{keyword}', error='{e}'")
- return []
-
- async def search_products_httpx(self, keyword: str) -> list[dict]:
- """httpx를 사용한 상품 검색"""
- encoded_keyword = urllib.parse.quote(keyword)
- search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}"
-
- try:
- logger.info(
- f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'"
- )
- response = await self.client.get(search_url)
- response.raise_for_status()
- soup = BeautifulSoup(response.content, "html.parser")
-
- product_links = []
- all_links = soup.find_all("a", href=True)
-
- for link in all_links:
- href = link["href"]
- if "view.php" in href and (
- "platform=1688" in href or "num_iid" in href
- ):
- full_url = (
- f"{self.base_url}{href}" if href.startswith("/") else href
- )
- title = (
- link.get("title", "")
- or link.get_text(strip=True)
- or "Unknown Title"
- )
-
- product_links.append({"url": full_url, "title": title})
-
- logger.info(f"httpx로 발견한 상품 링크: {len(product_links)}개")
- return product_links[:20]
-
- except Exception as e:
- logger.error(f"httpx 검색 오류: keyword='{keyword}', error='{e}'")
- return []
-
- async def get_basic_product_info(self, product_url: str) -> dict:
- """기본 상품 정보만 크롤링"""
- try:
- logger.debug(f"기본 상품 정보 크롤링 시작: url='{product_url}'")
-
- if self.use_selenium:
- self.driver.get(product_url)
- self.wait.until(
- lambda driver: driver.execute_script("return document.readyState")
- == "complete"
- )
- soup = BeautifulSoup(self.driver.page_source, "html.parser")
- else:
- response = await self.client.get(product_url)
- response.raise_for_status()
- soup = BeautifulSoup(response.content, "html.parser")
-
- title_element = soup.find("h1", {"id": "kakaotitle"})
- title = title_element.get_text(strip=True) if title_element else "제목 없음"
-
- logger.debug(f"기본 상품 정보 크롤링 완료: title='{title[:50]}'")
- return {"url": product_url, "title": title}
-
- except Exception as e:
- logger.error(f"기본 상품 크롤링 오류: url='{product_url}', error='{e}'")
- return None
-
- async def close(self):
- """리소스 정리"""
- if self.use_selenium and hasattr(self, "driver"):
- try:
- self.driver.quit()
- logger.info("Selenium WebDriver 종료 완료")
- except Exception as e:
- logger.warning(f"Selenium WebDriver 종료 중 오류: {e}")
- elif hasattr(self, "client"):
- try:
- await self.client.aclose()
- logger.info("httpx 클라이언트 종료 완료")
- except Exception as e:
- logger.warning(f"httpx 클라이언트 종료 중 오류: {e}")
-
-
class DetailCrawler(SearchCrawler):
"""SearchCrawler를 확장한 상세 크롤링 클래스"""
diff --git a/apps/pre-processing-service/app/service/crawlers/search_crawler.py b/apps/pre-processing-service/app/service/crawlers/search_crawler.py
new file mode 100644
index 00000000..a0d46e02
--- /dev/null
+++ b/apps/pre-processing-service/app/service/crawlers/search_crawler.py
@@ -0,0 +1,137 @@
+import urllib.parse
+import time
+from .base_crawler import BaseCrawler
+from loguru import logger
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+
+
+class SearchCrawler(BaseCrawler):
+ """상품 검색 전용 크롤러"""
+
+ async def search_products_selenium(self, keyword: str) -> list[dict]:
+ """Selenium을 사용한 상품 검색"""
+ encoded_keyword = urllib.parse.quote(keyword)
+ search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}"
+
+ try:
+ logger.info(
+ f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'"
+ )
+ self.driver.get(search_url)
+ time.sleep(5)
+
+ product_links = []
+ link_elements = self.driver.find_elements(By.TAG_NAME, "a")
+
+ for element in link_elements:
+ href = element.get_attribute("href")
+ if (
+ href
+ and "view.php" in href
+ and ("platform=1688" in href or "num_iid" in href)
+ ):
+ try:
+ title = element.get_attribute("title") or element.text.strip()
+ if title:
+ product_links.append({"url": href, "title": title})
+ except Exception:
+ product_links.append({"url": href, "title": "Unknown Title"})
+
+ # 중복 제거
+ seen_urls = set()
+ unique_products = []
+ for product in product_links:
+ if product["url"] not in seen_urls:
+ seen_urls.add(product["url"])
+ unique_products.append(product)
+
+ logger.info(
+ f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)"
+ )
+ return unique_products[:20]
+
+ except Exception as e:
+ logger.error(f"Selenium 검색 오류: keyword='{keyword}', error='{e}'")
+ return []
+
+ async def search_products_httpx(self, keyword: str) -> list[dict]:
+ """httpx를 사용한 상품 검색"""
+ encoded_keyword = urllib.parse.quote(keyword)
+ search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}"
+
+ try:
+ logger.info(
+ f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'"
+ )
+ response = await self.client.get(search_url)
+ response.raise_for_status()
+ soup = BeautifulSoup(response.content, "html.parser")
+
+ product_links = []
+ all_links = soup.find_all("a", href=True)
+
+ for link in all_links:
+ href = link["href"]
+ if "view.php" in href and (
+ "platform=1688" in href or "num_iid" in href
+ ):
+ full_url = (
+ f"{self.base_url}{href}" if href.startswith("/") else href
+ )
+ title = (
+ link.get("title", "")
+ or link.get_text(strip=True)
+ or "Unknown Title"
+ )
+
+ product_links.append({"url": full_url, "title": title})
+
+ logger.info(f"httpx로 발견한 상품 링크: {len(product_links)}개")
+ return product_links[:20]
+
+ except Exception as e:
+ logger.error(f"httpx 검색 오류: keyword='{keyword}', error='{e}'")
+ return []
+
+ async def get_basic_product_info(self, product_url: str) -> dict:
+ """기본 상품 정보만 크롤링"""
+ try:
+ logger.debug(f"기본 상품 정보 크롤링 시작: url='{product_url}'")
+
+ if self.use_selenium:
+ self.driver.get(product_url)
+ self.wait.until(
+ lambda driver: driver.execute_script("return document.readyState")
+ == "complete"
+ )
+ soup = BeautifulSoup(self.driver.page_source, "html.parser")
+ else:
+ response = await self.client.get(product_url)
+ response.raise_for_status()
+ soup = BeautifulSoup(response.content, "html.parser")
+
+ title_element = soup.find("h1", {"id": "kakaotitle"})
+ title = title_element.get_text(strip=True) if title_element else "제목 없음"
+
+ logger.debug(f"기본 상품 정보 크롤링 완료: title='{title[:50]}'")
+ return {"url": product_url, "title": title}
+
+ except Exception as e:
+ logger.error(f"기본 상품 크롤링 오류: url='{product_url}', error='{e}'")
+ return None
+
+ async def close(self):
+ """리소스 정리"""
+ if self.use_selenium and hasattr(self, "driver"):
+ try:
+ self.driver.quit()
+ logger.info("Selenium WebDriver 종료 완료")
+ except Exception as e:
+ logger.warning(f"Selenium WebDriver 종료 중 오류: {e}")
+ elif hasattr(self, "client"):
+ try:
+ await self.client.aclose()
+ logger.info("httpx 클라이언트 종료 완료")
+ except Exception as e:
+ logger.warning(f"httpx 클라이언트 종료 중 오류: {e}")
diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py
index 575767ee..b6341482 100644
--- a/apps/pre-processing-service/app/service/keyword_service.py
+++ b/apps/pre-processing-service/app/service/keyword_service.py
@@ -1,12 +1,11 @@
-# Pydantic 모델을 가져오기 위해 schemas 파일 import
import json
import random
-
+from app.utils.response import Response
import httpx
-from starlette import status
-
+from loguru import logger
from ..errors.CustomException import InvalidItemDataException
from ..model.schemas import RequestNaverSearch
+from datetime import date, timedelta
async def keyword_search(request: RequestNaverSearch) -> dict:
@@ -14,12 +13,10 @@ async def keyword_search(request: RequestNaverSearch) -> dict:
네이버 검색 요청을 처리하는 비즈니스 로직입니다.
입력받은 데이터를 기반으로 응답 데이터를 생성하여 딕셔너리로 반환합니다.
"""
-
+ logger.info(f"검색 플랫폼: {request.tag}")
# 키워드 검색
if request.tag == "naver":
- trending_keywords = await search_naver_rank(
- **request.model_dump(include={"category", "start_date", "end_date"})
- )
+ trending_keywords = await search_naver_rank()
elif request.tag == "naver_store":
trending_keywords = await search_naver_store()
else:
@@ -28,14 +25,16 @@ async def keyword_search(request: RequestNaverSearch) -> dict:
if not trending_keywords:
raise InvalidItemDataException()
- response_data = request.model_dump()
- response_data["keyword"] = random.choice(list(trending_keywords.values()))
- response_data["total_keyword"] = trending_keywords
- response_data["status"] = "success"
- return response_data
+ keyword = random.choice(list(trending_keywords.values()))
+ logger.info(f"선택된 키워드: {keyword}, 검색된 키워드 수: {len(trending_keywords)}")
+ data = {
+ "keyword": keyword,
+ "total_keyword": trending_keywords,
+ }
+ return Response.ok(data)
-async def search_naver_rank(category, start_date, end_date) -> dict[int, str]:
+async def search_naver_rank() -> dict[int, str]:
"""
네이버 데이터 랩 키워드 검색 모듈
"""
@@ -45,6 +44,28 @@ async def search_naver_rank(category, start_date, end_date) -> dict[int, str]:
"Referer": "https://datalab.naver.com/shoppingInsight/sCategory.naver",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
}
+ categories = [
+ "50000000",
+ "50000001",
+ "50000002",
+ "50000003",
+ "50000004",
+ "50000005",
+ "50000006",
+ "50000007",
+ "50000008",
+ "50000009",
+ ]
+ category = random.choice(categories)
+ logger.info(f"선택된 카테고리: {category}")
+ today = date.today()
+ yesterday = today - timedelta(days=1)
+
+ # 날짜를 YYYY-MM-DD 포맷으로 변환
+ end_date = today.strftime("%Y-%m-%d")
+ logger.info(f"실행 날짜: {end_date}")
+ start_date = yesterday.strftime("%Y-%m-%d")
+
keywords_dic = {}
async with httpx.AsyncClient() as client:
for page in range(1, 3):
@@ -69,7 +90,9 @@ async def search_naver_rank(category, start_date, end_date) -> dict[int, str]:
httpx.RequestError,
json.JSONDecodeError,
) as e:
- print(f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}")
+ logger.error(
+ f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}"
+ )
raise InvalidItemDataException
return keywords_dic
@@ -81,7 +104,6 @@ async def search_naver_store() -> dict[int, str]:
"""
url = "https://snxbest.naver.com/api/v1/snxbest/keyword/rank?ageType=ALL&categoryId=A&sortType=KEYWORD_POPULAR&periodType=DAILY"
headers = {}
-
async with httpx.AsyncClient() as client:
try:
# API에 GET 요청을 보냅니다.
@@ -96,5 +118,5 @@ async def search_naver_store() -> dict[int, str]:
return keyword_dict
except (httpx.HTTPStatusError, httpx.RequestError, json.JSONDecodeError) as e:
- print(f"네이버 스토어에서 데이터를 가져오는 데 실패했습니다: {e}")
+ logger.error(f"네이버 스토어에서 데이터를 가져오는 데 실패했습니다: {e}")
raise InvalidItemDataException from e
diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py
index 5816957a..bb6e88e2 100644
--- a/apps/pre-processing-service/app/service/match_service.py
+++ b/apps/pre-processing-service/app/service/match_service.py
@@ -2,6 +2,7 @@
from app.errors.CustomException import InvalidItemDataException
from ..model.schemas import RequestSadaguMatch
from loguru import logger
+from app.utils.response import Response
class MatchService:
@@ -15,20 +16,16 @@ def match_products(self, request: RequestSadaguMatch) -> dict:
keyword = request.keyword
products = request.search_results
- logger.info(
- f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}"
- )
+ logger.info(f"키워드 매칭 서비스 시작: keyword='{keyword}'")
if not products:
logger.warning(f"매칭할 상품이 없음: keyword='{keyword}'")
- return {
- "job_id": request.job_id,
- "schedule_id": request.schedule_id,
- "schedule_his_id": request.schedule_his_id,
+
+ data = {
"keyword": keyword,
"matched_products": [],
- "status": "success",
}
+ return Response.ok(data, "매칭상품이 존재하지 않습니다.")
try:
matcher = KeywordMatcher()
@@ -78,18 +75,12 @@ def match_products(self, request: RequestSadaguMatch) -> dict:
logger.info(
f"최고 매칭 상품: title='{best_match['title'][:30]}', score={best_match['match_info']['match_score']:.3f}"
)
-
- return {
- "job_id": request.job_id,
- "schedule_id": request.schedule_id,
- "schedule_his_id": request.schedule_his_id,
+ data = {
"keyword": keyword,
"matched_products": matched_products,
- "status": "success",
}
+ return Response.ok(data)
except Exception as e:
- logger.error(
- f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'"
- )
+ logger.error(f"매칭 서비스 오류: error='{e}'")
raise InvalidItemDataException()
diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py
index 6fb09c0f..171bd57f 100644
--- a/apps/pre-processing-service/app/service/search_service.py
+++ b/apps/pre-processing-service/app/service/search_service.py
@@ -1,7 +1,10 @@
-from app.utils.crawler_utils import SearchCrawler
+from app.service.crawlers.search_crawler import SearchCrawler
from app.errors.CustomException import InvalidItemDataException
from ..model.schemas import RequestSadaguSearch
from loguru import logger
+from app.utils.response import Response
+from datetime import datetime
+import time
class SearchService:
@@ -15,10 +18,11 @@ async def search_products(self, request: RequestSadaguSearch) -> dict:
keyword = request.keyword
crawler = SearchCrawler(use_selenium=True)
+ # 시작 시간 기록
+ start_time = time.time()
+
try:
- logger.info(
- f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'"
- )
+ logger.info(f"keyword='{keyword}'")
# Selenium 또는 httpx로 상품 검색
if crawler.use_selenium:
@@ -28,14 +32,13 @@ async def search_products(self, request: RequestSadaguSearch) -> dict:
if not search_results:
logger.warning(f"검색 결과가 없습니다: keyword='{keyword}'")
- return {
- "job_id": request.job_id,
- "schedule_id": request.schedule_id,
- "schedule_his_id": request.schedule_his_id,
+
+ # SadaguSearchData 구조에 맞춰 response_data 생성
+ data = {
"keyword": keyword,
"search_results": [],
- "status": "success",
}
+ return Response.ok(data, "검색 결과가 없습니다.")
# 상품별 기본 정보 수집 (제목이 없는 경우 다시 크롤링)
enriched_results = []
@@ -85,23 +88,23 @@ async def search_products(self, request: RequestSadaguSearch) -> dict:
)
continue
+ # 검색 소요 시간 계산
+ search_time_ms = int((time.time() - start_time) * 1000)
+ logger.info(f"검색 소요 시간 = {search_time_ms}")
+
logger.success(
f"상품 검색 완료: keyword='{keyword}', 초기검색={len(search_results)}개, 최종유효상품={len(enriched_results)}개"
)
- return {
- "job_id": request.job_id,
- "schedule_id": request.schedule_id,
- "schedule_his_id": request.schedule_his_id,
+ # SadaguSearchData 구조에 맞춰 response_data 생성
+ data = {
"keyword": keyword,
"search_results": enriched_results,
- "status": "success",
}
+ return Response.ok(data)
except Exception as e:
- logger.error(
- f"검색 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'"
- )
+ logger.error(f"검색 서비스 오류: keyword='{keyword}', error='{e}'")
raise InvalidItemDataException()
finally:
diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py
index 0e245da1..516b0c63 100644
--- a/apps/pre-processing-service/app/service/similarity_service.py
+++ b/apps/pre-processing-service/app/service/similarity_service.py
@@ -1,177 +1,162 @@
-# from app.utils.similarity_analyzer import SimilarityAnalyzer
-# from app.errors.CustomException import InvalidItemDataException
-# from ..model.schemas import RequestSadaguSimilarity
-# from loguru import logger
-#
-#
-# class SimilarityService:
-# def __init__(self):
-# pass
-#
-# def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict:
-# """
-# BERT 기반 유사도 분석 후 상품 선택 - 4단계
-# """
-# keyword = request.keyword
-# candidates = request.matched_products
-# fallback_products = request.search_results or []
-#
-# logger.info(
-# f"유사도 분석 서비스 시작: job_id={request.job_id}, keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}"
-# )
-#
-# # 매칭된 상품이 없으면 전체 검색 결과로 폴백
-# if not candidates:
-# if not fallback_products:
-# logger.warning(
-# f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'"
-# )
-# return {
-# "job_id": request.job_id,
-# "schedule_id": request.schedule_id,
-# "schedule_his_id": request.schedule_his_id,
-# "keyword": keyword,
-# "selected_product": None,
-# "reason": "매칭된 상품과 검색 결과가 모두 없음",
-# "status": "success",
-# }
-#
-# logger.info("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석")
-# candidates = fallback_products
-# analysis_mode = "fallback_similarity_only"
-# else:
-# analysis_mode = "matched_products"
-#
-# try:
-# analyzer = SimilarityAnalyzer()
-#
-# logger.info(
-# f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... (모드: {analysis_mode})"
-# )
-#
-# # 한 개만 있으면 바로 선택
-# if len(candidates) == 1:
-# selected_product = candidates[0]
-#
-# logger.info("단일 후보 상품 - 유사도 검증 진행")
-# # 유사도 계산
-# similarity = analyzer.calculate_similarity(
-# keyword, selected_product["title"]
-# )
-#
-# # 폴백 모드에서는 임계값 검증
-# if analysis_mode == "fallback_similarity_only":
-# similarity_threshold = 0.3
-# if similarity < similarity_threshold:
-# logger.warning(
-# f"단일 상품 유사도 미달: similarity={similarity:.4f} < threshold={similarity_threshold}"
-# )
-# return {
-# "job_id": request.job_id,
-# "schedule_id": request.schedule_id,
-# "schedule_his_id": request.schedule_his_id,
-# "keyword": keyword,
-# "selected_product": None,
-# "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})",
-# "status": "success",
-# }
-#
-# selected_product["similarity_info"] = {
-# "similarity_score": float(similarity),
-# "analysis_type": "single_candidate",
-# "analysis_mode": analysis_mode,
-# }
-#
-# logger.success(
-# f"단일 상품 선택 완료: title='{selected_product['title'][:30]}', similarity={similarity:.4f}"
-# )
-#
-# return {
-# "job_id": request.job_id,
-# "schedule_id": request.schedule_id,
-# "schedule_his_id": request.schedule_his_id,
-# "keyword": keyword,
-# "selected_product": selected_product,
-# "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})",
-# "status": "success",
-# }
-#
-# # 여러 개가 있으면 유사도 비교
-# logger.info("여러 상품 중 최고 유사도로 선택...")
-#
-# # 제목만 추출해서 배치 분석
-# titles = [product["title"] for product in candidates]
-# similarity_results = analyzer.analyze_similarity_batch(keyword, titles)
-#
-# # 결과 출력
-# logger.info("유사도 분석 결과:")
-# for i, result in enumerate(similarity_results[:5]): # 상위 5개만 로그
-# logger.info(
-# f" {i+1}위: {result['title'][:40]} | 유사도: {result['similarity']:.4f}"
-# )
-#
-# # 최고 유사도 선택
-# best_result = similarity_results[0]
-# selected_product = candidates[best_result["index"]].copy()
-#
-# # 폴백 모드에서는 임계값 검증
-# similarity_threshold = 0.3
-# if (
-# analysis_mode == "fallback_similarity_only"
-# and best_result["similarity"] < similarity_threshold
-# ):
-# logger.warning(
-# f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}"
-# )
-# return {
-# "job_id": request.job_id,
-# "schedule_id": request.schedule_id,
-# "schedule_his_id": request.schedule_his_id,
-# "keyword": keyword,
-# "selected_product": None,
-# "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})",
-# "status": "success",
-# }
-#
-# # 유사도 정보 추가
-# selected_product["similarity_info"] = {
-# "similarity_score": best_result["similarity"],
-# "analysis_type": "multi_candidate_bert",
-# "analysis_mode": analysis_mode,
-# "rank": 1,
-# "total_candidates": len(candidates),
-# }
-#
-# # 매칭 모드에서는 종합 점수도 계산
-# if analysis_mode == "matched_products" and "match_info" in selected_product:
-# match_score = selected_product["match_info"]["match_score"]
-# similarity_score = best_result["similarity"]
-# # 가중치: 매칭 40%, 유사도 60%
-# final_score = match_score * 0.4 + similarity_score * 0.6
-# selected_product["final_score"] = final_score
-# reason = f"종합점수({final_score:.4f}) = 매칭({match_score:.4f})*0.4 + 유사도({similarity_score:.4f})*0.6"
-# logger.info(
-# f"종합 점수 계산: match_score={match_score:.4f}, similarity_score={similarity_score:.4f}, final_score={final_score:.4f}"
-# )
-# else:
-# reason = f"유사도({best_result['similarity']:.4f}) 기준 선택 ({analysis_mode})"
-#
-# logger.success(
-# f"상품 선택 완료: title='{selected_product['title'][:30]}', {reason}"
-# )
-#
-# return {
-# "job_id": request.job_id,
-# "schedule_id": request.schedule_id,
-# "schedule_his_id": request.schedule_his_id,
-# "keyword": keyword,
-# "selected_product": selected_product,
-# "reason": reason,
-# "status": "success",
-# }
-#
-# except Exception as e:
-# logger.error(
-# f"유사도 분석 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'"
-# )
-# raise InvalidItemDataException()
+from app.utils.similarity_analyzer import SimilarityAnalyzerONNX
+from app.errors.CustomException import InvalidItemDataException
+from ..model.schemas import RequestSadaguSimilarity
+from loguru import logger
+from app.utils.response import Response
+
+
+class SimilarityService:
+ def __init__(self):
+ pass
+
+ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict:
+ """
+ BERT 기반 유사도 분석 후 상품 선택 - 4단계
+ """
+ keyword = request.keyword
+ candidates = request.matched_products
+ fallback_products = request.search_results or []
+
+ logger.info(
+ f"유사도 분석 서비스 시작: keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}"
+ )
+
+ # 매칭된 상품이 없으면 전체 검색 결과로 폴백
+ if not candidates:
+ if not fallback_products:
+ logger.warning(
+ f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'"
+ )
+
+ data = {
+ "keyword": keyword,
+ "selected_product": None,
+ "reason": "매칭된 상품과 검색 결과가 모두 없음",
+ }
+ return Response.ok(data, "매칭된 상품과 검색 결과가 모두 없습니다.")
+
+ logger.info("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석")
+ candidates = fallback_products
+ analysis_mode = "fallback_similarity_only"
+ else:
+ analysis_mode = "matched_products"
+
+ try:
+ analyzer = SimilarityAnalyzerONNX()
+
+ logger.info(
+ f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... (모드: {analysis_mode})"
+ )
+
+ # 한 개만 있으면 바로 선택
+ if len(candidates) == 1:
+ selected_product = candidates[0]
+
+ logger.info("단일 후보 상품 - 유사도 검증 진행")
+ # 유사도 계산
+ similarity = analyzer.calculate_similarity(
+ keyword, selected_product["title"]
+ )
+
+ # 폴백 모드에서는 임계값 검증
+ if analysis_mode == "fallback_similarity_only":
+ similarity_threshold = 0.3
+ if similarity < similarity_threshold:
+ logger.warning(
+ f"단일 상품 유사도 미달: similarity={similarity:.4f} < threshold={similarity_threshold}"
+ )
+ data = {
+ "keyword": keyword,
+ "selected_product": None,
+ "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})",
+ }
+ return Response.ok(
+ data, "단일 상품 유사도가 기준에 미달되어 선택된 상품이 없습니다."
+ )
+
+ selected_product["similarity_info"] = {
+ "similarity_score": float(similarity),
+ "analysis_type": "single_candidate",
+ "analysis_mode": analysis_mode,
+ }
+
+ logger.success(
+ f"단일 상품 선택 완료: title='{selected_product['title'][:30]}', similarity={similarity:.4f}"
+ )
+ data = {
+ "keyword": keyword,
+ "selected_product": selected_product,
+ "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})",
+ }
+ return Response.ok(data)
+
+ # 여러 개가 있으면 유사도 비교
+ logger.info("여러 상품 중 최고 유사도로 선택...")
+
+ # 제목만 추출해서 배치 분석
+ titles = [product["title"] for product in candidates]
+ similarity_results = analyzer.analyze_similarity_batch(keyword, titles)
+
+ # 결과 출력
+ logger.info("유사도 분석 결과:")
+ for i, result in enumerate(similarity_results[:5]): # 상위 5개만 로그
+ logger.info(
+ f" {i+1}위: {result['title'][:40]} | 유사도: {result['similarity']:.4f}"
+ )
+
+ # 최고 유사도 선택
+ best_result = similarity_results[0]
+ selected_product = candidates[best_result["index"]].copy()
+
+ # 폴백 모드에서는 임계값 검증
+ similarity_threshold = 0.3
+ if (
+ analysis_mode == "fallback_similarity_only"
+ and best_result["similarity"] < similarity_threshold
+ ):
+ logger.warning(
+ f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}"
+ )
+ data = {
+ "keyword": keyword,
+ "selected_product": None,
+ "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})",
+ }
+ return Response.ok(data, "최고 유사도가 기준보다 미달 되었습니다.")
+
+ # 유사도 정보 추가
+ selected_product["similarity_info"] = {
+ "similarity_score": best_result["similarity"],
+ "analysis_type": "multi_candidate_bert",
+ "analysis_mode": analysis_mode,
+ "rank": 1,
+ "total_candidates": len(candidates),
+ }
+
+ # 매칭 모드에서는 종합 점수도 계산
+ if analysis_mode == "matched_products" and "match_info" in selected_product:
+ match_score = selected_product["match_info"]["match_score"]
+ similarity_score = best_result["similarity"]
+ # 가중치: 매칭 40%, 유사도 60%
+ final_score = match_score * 0.4 + similarity_score * 0.6
+ selected_product["final_score"] = final_score
+ reason = f"종합점수({final_score:.4f}) = 매칭({match_score:.4f})*0.4 + 유사도({similarity_score:.4f})*0.6"
+ logger.info(
+ f"종합 점수 계산: match_score={match_score:.4f}, similarity_score={similarity_score:.4f}, final_score={final_score:.4f}"
+ )
+ else:
+ reason = f"유사도({best_result['similarity']:.4f}) 기준 선택 ({analysis_mode})"
+
+ logger.success(
+ f"상품 선택 완료: title='{selected_product['title'][:30]}', {reason}"
+ )
+ data = {
+ "keyword": keyword,
+ "selected_product": selected_product,
+ "reason": reason,
+ }
+ return Response.ok(data)
+
+ except Exception as e:
+ logger.error(f"유사도 분석 서비스 오류: keyword='{keyword}', error='{e}'")
+ raise InvalidItemDataException()
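
In matched mode the final ranking blends the keyword match score with the BERT similarity using the fixed 0.4/0.6 weights from the code above. A worked example of the formula:

```python
match_score = 0.85
similarity_score = 0.72
final_score = match_score * 0.4 + similarity_score * 0.6
print(round(final_score, 3))  # 0.772
```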
diff --git a/apps/pre-processing-service/app/test/test_blog_create_service.py b/apps/pre-processing-service/app/test/test_blog_create_service.py
new file mode 100644
index 00000000..d32e4e9e
--- /dev/null
+++ b/apps/pre-processing-service/app/test/test_blog_create_service.py
@@ -0,0 +1,83 @@
+import unittest
+from unittest.mock import patch, MagicMock
+
+from app.service.blog.blog_create_service import BlogContentService
+from app.model.schemas import RequestBlogCreate
+
+
+class TestBlogContentGeneration(unittest.TestCase):
+ """블로그 콘텐츠 생성 핵심 로직 테스트"""
+
+ @patch.dict("os.environ", {"OPENAI_API_KEY": "test-key"})
+ @patch("app.service.blog.blog_create_service.OpenAI")
+ def setUp(self, mock_openai_class):
+ """테스트 설정 - OpenAI Mock 적용"""
+ # Mock OpenAI 클라이언트 설정
+ self.mock_client = MagicMock()
+ mock_openai_class.return_value = self.mock_client
+
+ # 서비스 인스턴스 생성
+ self.service = BlogContentService()
+
+ def test_generate_blog_content_success(self):
+ """정상적인 콘텐츠 생성 테스트"""
+ # Mock 응답 설정
+ mock_choice = MagicMock()
+ mock_choice.message.content = """아이폰 15 케이스 완벽 가이드
+제품 소개
+이 케이스는 뛰어난 보호 성능을 제공합니다."""
+
+ mock_response = MagicMock()
+ mock_response.choices = [mock_choice]
+
+ self.mock_client.chat.completions.create.return_value = mock_response
+
+ # 테스트 요청
+ request = RequestBlogCreate(
+ keyword="아이폰 케이스",
+ product_info={"title": "아이폰 15 투명 케이스", "price": 25000},
+ )
+
+ # 실행
+ result = self.service.generate_blog_content(request)
+
+ # 검증
+ self.assertIn("title", result)
+ self.assertIn("content", result)
+ self.assertIn("tags", result)
+ # 실제 파싱 로직에 따른 제목 검증 (키워드가 제목에 포함되지 않아 기본 제목 생성됨)
+ self.assertEqual(
+ result["title"], "아이폰 15 투명 케이스 - 아이폰 케이스 완벽 가이드"
+ )
+ self.assertIn("", result["content"])
+ self.assertIn("아이폰 케이스", result["tags"])
+
+ def test_generate_blog_content_api_failure(self):
+ """API 실패 시 폴백 콘텐츠 생성 테스트"""
+ # API 실패 시뮬레이션
+ self.mock_client.chat.completions.create.side_effect = Exception("API Error")
+
+ request = RequestBlogCreate(keyword="테스트 키워드")
+
+ # 실행
+ result = self.service.generate_blog_content(request)
+
+ # 폴백 콘텐츠 검증
+ self.assertIn("title", result)
+ self.assertIn("content", result)
+ self.assertIn("tags", result)
+ self.assertEqual(result["title"], "테스트 키워드 - 완벽 가이드")
+
+ def test_generate_blog_content_minimal_input(self):
+ """최소한의 입력으로 콘텐츠 생성 테스트"""
+ # API 실패 시뮬레이션
+ self.mock_client.chat.completions.create.side_effect = Exception("API Error")
+
+ request = RequestBlogCreate()
+
+ result = self.service.generate_blog_content(request)
+
+ # 기본 콘텐츠 생성 확인
+ self.assertEqual(result["title"], "상품 정보 및 구매 가이드")
+ self.assertIn("", result["content"])
+ self.assertEqual(result["tags"], ["상품정보", "리뷰"])
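
One subtlety in the tests above: `@patch` applied to `setUp` is only active while `setUp` runs. The tests still pass because `BlogContentService` captures the mocked `OpenAI` client in its constructor, so `self.mock_client` keeps steering responses after the patch is reverted; a test that constructed a second service instance would hit the real `OpenAI` class again.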
diff --git a/apps/pre-processing-service/app/test/test_extraction_html.py b/apps/pre-processing-service/app/test/test_extraction_html.py
new file mode 100644
index 00000000..a023972c
--- /dev/null
+++ b/apps/pre-processing-service/app/test/test_extraction_html.py
@@ -0,0 +1,487 @@
+# if __name__ == "__main__":
+# from app.utils.crawling_util import CrawlingUtil
+# from app.utils.llm_extractor import LLMExtractor
+# from selenium.webdriver.common.by import By
+# from selenium.webdriver.support import expected_conditions as EC
+# from selenium.common.exceptions import TimeoutException
+# from selenium.webdriver.common.keys import Keys
+# from selenium.webdriver.common.action_chains import ActionChains
+# import pyperclip
+# import time
+# import json
+#
+# crawling_util = CrawlingUtil()
+# llm_extractor = LLMExtractor()
+#
+# start_time = time.time()
+# driver = crawling_util.get_driver()
+# wait_driver = crawling_util.get_wait()
+#
+# # ========== 로그인 부분 ==========
+# driver.get("https://nid.naver.com/nidlogin.login")
+# time.sleep(5)
+# html = driver.page_source
+#
+# print(f"원본 HTML 길이: {len(html)}")
+# html_list = preprocess_html(html)
+#
+# result_html = 0
+#
+# for html in html_list:
+# result_html += len(html)
+#
+# print(f"전처리된 HTML 총 길이: {result_html}, 분할된 청크 수: {len(html_list)}")
+#
+# result = []
+#
+# for idx, html in enumerate(html_list):
+# print(f"전처리된 HTML 길이: {len(html)}, List {idx}번 ")
+# prompt = llm_extractor.extraction_prompt("아이디, 비밀번호를 입력할 수 있는 요소, 로그인 버튼을 클릭할 수 있는 요소", html)
+#
+# response = llm_extractor.client.chat.completions.create(
+# model=llm_extractor.model,
+# messages=[{"role": "system", "content": prompt}],
+# temperature=0,
+# response_format={"type": "json_object"}
+# )
+#
+# result_json = response.choices[0].message.content
+#
+# result.append(result_json)
+#
+# parse_result = [json.loads(item) for item in result]
+# print(json.dumps(parse_result, indent=4, ensure_ascii=False))
+#
+# # 로그인
+# naver_id = "all2641"
+# naver_password = "kdyn2641*"
+#
+# # 모든 결과에서 요소들을 수집 (개선된 방식)
+# all_elements = {}
+#
+# for item in parse_result:
+# if not item.get("found"):
+# print("요소를 찾지 못했습니다.")
+# continue
+#
+# elements = item.get("elements", [])
+# for element in elements:
+# for key, value in element.items():
+# # ID 관련 요소
+# if "id" in key.lower():
+# if "css_selector" in key:
+# all_elements["id_css"] = value
+# elif "xpath" in key:
+# all_elements["id_xpath"] = value
+#
+# # Password 관련 요소
+# elif "password" in key.lower() or "pw" in key.lower():
+# if "css_selector" in key:
+# all_elements["pw_css"] = value
+# elif "xpath" in key:
+# all_elements["pw_xpath"] = value
+#
+# # Login 관련 요소
+# elif "login" in key.lower():
+# if "css_selector" in key:
+# all_elements["login_css"] = value
+# elif "xpath" in key:
+# all_elements["login_xpath"] = value
+#
+# print(f"수집된 요소들: {all_elements}")
+#
+# # 아이디 입력
+# id_input = None
+# if all_elements.get("id_css"):
+# try:
+# id_input = wait_driver.until(
+# EC.presence_of_element_located((By.CSS_SELECTOR, all_elements["id_css"]))
+# )
+# print(f"아이디 요소 발견 (CSS): {all_elements['id_css']}")
+# time.sleep(2)
+# except TimeoutException:
+# print(f"아이디 요소를 찾지 못했습니다 (CSS): {all_elements['id_css']}")
+#
+# if not id_input and all_elements.get("id_xpath"):
+# try:
+# id_input = wait_driver.until(
+# EC.presence_of_element_located((By.XPATH, all_elements["id_xpath"]))
+# )
+# print(f"아이디 요소 발견 (XPath): {all_elements['id_xpath']}")
+# time.sleep(2)
+# except TimeoutException:
+# print(f"아이디 요소를 찾지 못했습니다 (XPath): {all_elements['id_xpath']}")
+#
+# if id_input:
+# id_input.click()
+# time.sleep(1)
+# pyperclip.copy(naver_id)
+# time.sleep(1)
+# id_input.send_keys(Keys.COMMAND, "v")
+# time.sleep(1)
+#
+# # 비밀번호 입력
+# password_input = None
+# if all_elements.get("pw_css"):
+# try:
+# password_input = wait_driver.until(
+# EC.presence_of_element_located((By.CSS_SELECTOR, all_elements["pw_css"]))
+# )
+# print(f"비밀번호 요소 발견 (CSS): {all_elements['pw_css']}")
+# time.sleep(2)
+# except TimeoutException:
+# print(f"비밀번호 요소를 찾지 못했습니다 (CSS): {all_elements['pw_css']}")
+#
+# if not password_input and all_elements.get("pw_xpath"):
+# try:
+# password_input = wait_driver.until(
+# EC.presence_of_element_located((By.XPATH, all_elements["pw_xpath"]))
+# )
+# print(f"비밀번호 요소 발견 (XPath): {all_elements['pw_xpath']}")
+# time.sleep(2)
+# except TimeoutException:
+# print(f"비밀번호 요소를 찾지 못했습니다 (XPath): {all_elements['pw_xpath']}")
+#
+# if password_input:
+# password_input.click()
+# time.sleep(1)
+# pyperclip.copy(naver_password)
+# time.sleep(1)
+# password_input.send_keys(Keys.COMMAND, "v")
+# time.sleep(1)
+#
+# # 로그인 버튼 클릭
+# login_button = None
+# if all_elements.get("login_css"):
+# try:
+# login_selector = all_elements["login_css"].replace('\\', '')
+# login_button = wait_driver.until(
+# EC.element_to_be_clickable((By.CSS_SELECTOR, login_selector))
+# )
+# print(f"로그인 버튼 요소 발견 (CSS): {login_selector}")
+# except TimeoutException:
+# print(f"로그인 버튼 요소를 찾지 못했습니다 (CSS): {all_elements['login_css']}")
+#
+# if not login_button and all_elements.get("login_xpath"):
+# try:
+# login_button = wait_driver.until(
+# EC.element_to_be_clickable((By.XPATH, all_elements["login_xpath"]))
+# )
+# print(f"로그인 버튼 요소 발견 (XPath): {all_elements['login_xpath']}")
+# except TimeoutException:
+# print(f"로그인 버튼 요소를 찾지 못했습니다 (XPath): {all_elements['login_xpath']}")
+#
+# if login_button:
+# login_button.click()
+# print("로그인 버튼 클릭 완료")
+#
+# # 로그인 완료 대기
+# time.sleep(5)
+# print("로그인 완료, 블로그 포스팅 시작...")
+#
+# # ========== 블로그 포스팅 부분 (도움말 닫기 버튼 추가) ==========
+# try:
+# # 네이버 블로그 글쓰기 페이지로 이동
+# post_content_url = f"https://blog.naver.com/PostWriteForm.naver?blogId={naver_id}&Redirect=Write&redirect=Write&widgetTypeCall=true&noTrackingCode=true&directAccess=false"
+# driver.get(post_content_url)
+# print("블로그 글쓰기 페이지로 이동 완료. 5초 대기...")
+# time.sleep(10)
+#
+# blog_html = driver.page_source
+# print(f"HTML 길이: {len(blog_html)}")
+# blog_html_list = preprocess_html(blog_html)
+# blog_result_html = sum(len(html) for html in blog_html_list)
+# print(f"전처리된 HTML 총 길이: {blog_result_html}, 분할된 청크 수: {len(blog_html_list)}")
+#
+# # 테스트용 제목, 내용, 태그
+# test_title = "LLM 기반 자동화 포스팅"
+# test_content = "이 포스트는 LLM이 iframe 내부의 HTML을 분석하여 자동으로 작성한 글입니다."
+# test_tags = ["LLM", "자동화", "네이버블로그"]
+#
+# # 3. LLM을 사용해 iframe 내부의 블로그 요소들 추출
+# blog_result = []
+#
+# for idx, html in enumerate(blog_html_list):
+# print(f"HTML 청크 {idx + 1}/{len(blog_html_list)} 분석 중...")
+# prompt = llm_extractor.naver_post_extraction_prompt(html)
+# response = llm_extractor.client.chat.completions.create(
+# model=llm_extractor.model,
+# messages=[{"role": "system", "content": prompt}],
+# temperature=0,
+# response_format={"type": "json_object"}
+# )
+# blog_result.append(response.choices[0].message.content)
+#
+# blog_parse_result = [json.loads(item) for item in blog_result]
+# print("\n>> 블로그 요소 추출 결과:")
+# print(json.dumps(blog_parse_result, indent=4, ensure_ascii=False))
+#
+# # 4. 추출된 요소 정보 취합
+# blog_elements = {}
+# for item in blog_parse_result:
+# if not item.get("found"): continue
+# for element in item.get("elements", []):
+# for key, value in element.items():
+# if "title" in key.lower():
+# if "css_selector" in key:
+# blog_elements["title_css"] = value
+# elif "xpath" in key:
+# blog_elements["title_xpath"] = value
+# elif "content" in key.lower() or "body" in key.lower():
+# if "css_selector" in key:
+# blog_elements["content_css"] = value
+# elif "xpath" in key:
+# blog_elements["content_xpath"] = value
+# elif "help_close" in key.lower():
+# if "css_selector" in key:
+# blog_elements["help_close_css"] = value
+# elif "xpath" in key:
+# blog_elements["help_close_xpath"] = value
+# elif "first_publish" in key.lower():
+# if "css_selector" in key:
+# blog_elements["first_publish_css"] = value
+# elif "xpath" in key:
+# blog_elements["first_publish_xpath"] = value
+# elif "tag_input" in key.lower():
+# if "css_selector" in key:
+# blog_elements["tag_input_css"] = value
+# elif "xpath" in key:
+# blog_elements["tag_input_xpath"] = value
+# elif "final_publish" in key.lower():
+# if "css_selector" in key:
+# blog_elements["final_publish_css"] = value
+# elif "xpath" in key:
+# blog_elements["final_publish_xpath"] = value
+#
+# print(f"\n>> 수집된 블로그 요소들: {blog_elements}")
+#
+# # 5. 도움말 닫기 버튼 클릭 (발행 버튼이 가려지지 않도록)
+# help_close_button = None
+# help_close_css = blog_elements.get("help_close_css")
+# if help_close_css:
+# try:
+# help_close_button = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, help_close_css)))
+# print(f"✅ 도움말 닫기 버튼 발견 (CSS): {help_close_css}")
+# except TimeoutException:
+# print(f"⚠️ 도움말 닫기 버튼을 찾지 못했습니다 (CSS): {help_close_css}")
+#
+# if not help_close_button:
+# help_close_xpath = blog_elements.get("help_close_xpath")
+# if help_close_xpath:
+# try:
+# help_close_button = wait_driver.until(EC.element_to_be_clickable((By.XPATH, help_close_xpath)))
+# print(f"✅ 도움말 닫기 버튼 발견 (XPath): {help_close_xpath}")
+# except TimeoutException:
+# print(f"⚠️ 도움말 닫기 버튼을 찾지 못했습니다 (XPath): {help_close_xpath}")
+#
+# if help_close_button:
+# try:
+# help_close_button.click()
+# print("✅ 도움말 닫기 버튼 클릭 완료")
+# time.sleep(1) # 닫히는 시간 대기
+# except Exception as e:
+# print(f"⚠️ 도움말 닫기 버튼 클릭 실패: {str(e)}")
+# # JavaScript로 강제 클릭 시도
+# try:
+# driver.execute_script("arguments[0].click();", help_close_button)
+# print("✅ 도움말 닫기 버튼 JavaScript 클릭 완료")
+# time.sleep(1)
+# except Exception as js_e:
+# print(f"❌ 도움말 닫기 버튼 JavaScript 클릭도 실패: {str(js_e)}")
+# else:
+# print("⚠️ 도움말 닫기 버튼을 찾지 못했습니다. se-utils 요소 직접 제거를 시도합니다.")
+# # 직접 se-utils 요소 제거
+# try:
+# driver.execute_script("""
+# var element = document.querySelector('.se-utils');
+# if (element) {
+# element.style.display = 'none';
+# console.log('se-utils 요소를 숨겼습니다.');
+# }
+# """)
+# print("✅ se-utils 요소를 직접 숨김 처리했습니다.")
+# except Exception as e:
+# print(f"⚠️ se-utils 요소 숨김 처리 실패: {str(e)}")
+#
+# # 6. 제목 및 본문 입력 (CSS, XPath 순차 시도)
+# # 제목 입력
+# title_input = None
+# title_css = blog_elements.get("title_css")
+# if title_css:
+# try:
+# title_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, title_css)))
+# print(f"✅ 제목 요소 발견 (CSS): {title_css}")
+# except TimeoutException:
+# print(f"⚠️ 제목 요소를 찾지 못했습니다 (CSS): {title_css}")
+#
+# if not title_input:
+# title_xpath = blog_elements.get("title_xpath")
+# if title_xpath:
+# try:
+# title_input = wait_driver.until(EC.element_to_be_clickable((By.XPATH, title_xpath)))
+# print(f"✅ 제목 요소 발견 (XPath): {title_xpath}")
+# except TimeoutException:
+# print(f"⚠️ 제목 요소를 찾지 못했습니다 (XPath): {title_xpath}")
+#
+# if title_input:
+# ActionChains(driver).move_to_element(title_input).click().send_keys(test_title).perform()
+# print("✅ 제목 입력 완료")
+# else:
+# print("❌ 제목 입력 요소를 최종적으로 찾지 못했습니다.")
+#
+# # 본문 입력
+# content_input = None
+# content_css = blog_elements.get("content_css")
+# if content_css:
+# try:
+# content_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, content_css)))
+# print(f"✅ 본문 요소 발견 (CSS): {content_css}")
+# except TimeoutException:
+# print(f"⚠️ 본문 요소를 찾지 못했습니다 (CSS): {content_css}")
+#
+# if not content_input:
+# content_xpath = blog_elements.get("content_xpath")
+# if content_xpath:
+# try:
+# content_input = wait_driver.until(EC.element_to_be_clickable((By.XPATH, content_xpath)))
+# print(f"✅ 본문 요소 발견 (XPath): {content_xpath}")
+# except TimeoutException:
+# print(f"⚠️ 본문 요소를 찾지 못했습니다 (XPath): {content_xpath}")
+#
+# if content_input:
+# ActionChains(driver).move_to_element(content_input).click().send_keys(test_content).perform()
+# print("✅ 본문 입력 완료")
+# else:
+# print("❌ 본문 입력 요소를 최종적으로 찾지 못했습니다.")
+#
+# # 7. 발행 버튼 클릭 (LLM이 찾은 선택자 사용)
+# first_publish_button = None
+# first_publish_css = blog_elements.get("first_publish_css")
+# if first_publish_css:
+# try:
+# first_publish_button = wait_driver.until(
+# EC.element_to_be_clickable((By.CSS_SELECTOR, first_publish_css)))
+# print(f"✅ 첫 번째 발행 버튼 발견 (CSS): {first_publish_css}")
+# except TimeoutException:
+# print(f"⚠️ 첫 번째 발행 버튼을 찾지 못했습니다 (CSS): {first_publish_css}")
+#
+# if not first_publish_button:
+# first_publish_xpath = blog_elements.get("first_publish_xpath")
+# if first_publish_xpath:
+# try:
+# first_publish_button = wait_driver.until(
+# EC.element_to_be_clickable((By.XPATH, first_publish_xpath)))
+# print(f"✅ 첫 번째 발행 버튼 발견 (XPath): {first_publish_xpath}")
+# except TimeoutException:
+# print(f"⚠️ 첫 번째 발행 버튼을 찾지 못했습니다 (XPath): {first_publish_xpath}")
+#
+# if first_publish_button:
+# try:
+# # 일반 클릭 시도
+# first_publish_button.click()
+# print("✅ 첫 번째 발행 버튼 클릭 완료. 팝업창을 기다립니다...")
+# except Exception as click_error:
+# print(f"⚠️ 일반 클릭 실패, JavaScript 클릭 시도: {str(click_error)}")
+# driver.execute_script("arguments[0].click();", first_publish_button)
+# print("✅ 첫 번째 발행 버튼 JavaScript 클릭 완료. 팝업창을 기다립니다...")
+#
+# time.sleep(3)
+# else:
+# print("❌ 첫 번째 발행 버튼을 최종적으로 찾지 못했습니다. 하드코딩 선택자를 시도합니다.")
+# # 폴백: 하드코딩 선택자 사용
+# try:
+# publish_button = wait_driver.until(
+# EC.element_to_be_clickable((By.XPATH, "//button[.//span[normalize-space()='발행']]")))
+#
+# try:
+# publish_button.click()
+# print("✅ 발행 버튼 하드코딩 클릭 완료. 팝업창을 기다립니다...")
+# except Exception as click_error:
+# driver.execute_script("arguments[0].click();", publish_button)
+# print("✅ 발행 버튼 하드코딩 JavaScript 클릭 완료. 팝업창을 기다립니다...")
+#
+# time.sleep(3)
+# except TimeoutException:
+# print("❌ 하드코딩 발행 버튼도 찾지 못했습니다.")
+#
+# # 8. 태그 입력 및 최종 발행 (LLM이 찾은 선택자 사용)
+# try:
+# # 태그 입력 필드 찾기
+# tag_input = None
+# tag_input_css = blog_elements.get("tag_input_css")
+# if tag_input_css:
+# try:
+# tag_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, tag_input_css)))
+# print(f"✅ 태그 입력 필드 발견 (CSS): {tag_input_css}")
+# except TimeoutException:
+# print(f"⚠️ 태그 입력 필드를 찾지 못했습니다 (CSS): {tag_input_css}")
+#
+# if not tag_input:
+# tag_input_xpath = blog_elements.get("tag_input_xpath")
+# if tag_input_xpath:
+# try:
+# tag_input = wait_driver.until(EC.element_to_be_clickable((By.XPATH, tag_input_xpath)))
+# print(f"✅ 태그 입력 필드 발견 (XPath): {tag_input_xpath}")
+# except TimeoutException:
+# print(f"⚠️ 태그 입력 필드를 찾지 못했습니다 (XPath): {tag_input_xpath}")
+#
+# if not tag_input:
+# # 폴백: 하드코딩 선택자 사용
+# tag_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[placeholder*='태그']")))
+# print("✅ 태그 입력 필드 하드코딩 선택자로 발견")
+#
+# # 태그 입력
+# for tag in test_tags:
+# tag_input.send_keys(tag)
+# tag_input.send_keys(Keys.ENTER)
+# time.sleep(0.5)
+# print("✅ 태그 입력 완료")
+#
+# # 최종 발행 버튼 찾기
+# final_publish_button = None
+# final_publish_css = blog_elements.get("final_publish_css")
+# if final_publish_css:
+# try:
+# final_publish_button = wait_driver.until(
+# EC.element_to_be_clickable((By.CSS_SELECTOR, final_publish_css)))
+# print(f"✅ 최종 발행 버튼 발견 (CSS): {final_publish_css}")
+# except TimeoutException:
+# print(f"⚠️ 최종 발행 버튼을 찾지 못했습니다 (CSS): {final_publish_css}")
+#
+# if not final_publish_button:
+# final_publish_xpath = blog_elements.get("final_publish_xpath")
+# if final_publish_xpath:
+# try:
+# final_publish_button = wait_driver.until(
+# EC.element_to_be_clickable((By.XPATH, final_publish_xpath)))
+# print(f"✅ 최종 발행 버튼 발견 (XPath): {final_publish_xpath}")
+# except TimeoutException:
+# print(f"⚠️ 최종 발행 버튼을 찾지 못했습니다 (XPath): {final_publish_xpath}")
+#
+# if not final_publish_button:
+# # 폴백: 하드코딩 선택자 사용
+# final_publish_button = wait_driver.until(EC.element_to_be_clickable(
+# (By.XPATH, "//div[contains(@class,'popup')]//button[.//span[normalize-space()='발행']]")))
+# print("✅ 최종 발행 버튼 하드코딩 선택자로 발견")
+#
+# # 최종 발행 버튼 클릭
+# final_publish_button.click()
+# print("✅ 최종 발행 버튼 클릭 완료!")
+#
+#                 wait_driver.until(EC.url_contains("PostView.naver"))  # until()에는 timeout 인자가 없음; 대기 시간은 WebDriverWait 생성 시 지정됨
+# print("\n🎉 블로그 포스팅 발행 최종 완료! 🎉")
+# except TimeoutException:
+# print("❌ 발행 팝업 처리 중 오류가 발생했습니다.")
+# raise
+#
+# except Exception as e:
+# print(f"블로그 포스팅 중 오류 발생: {str(e)}")
+#
+# # ... (이후 전체 소요 시간 측정 및 드라이버 종료 코드) ...
+#
+# end_time = time.time()
+# print(f"전체 소요 시간: {end_time - start_time} seconds")
+#
+# # 대기 후 드라이버 종료
+# time.sleep(5)
+# driver.quit()
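+#
+# NOTE: the flow above repeats the same CSS-then-XPath fallback lookup six
+# times. A minimal consolidation sketch, assuming the same `wait_driver`
+# (WebDriverWait) and the By/EC/TimeoutException imports used above;
+# `find_clickable` is a hypothetical helper name:
+#
+# def find_clickable(wait_driver, css=None, xpath=None):
+#     """Try the CSS selector first, then fall back to XPath; return None if neither matches."""
+#     for by, selector in ((By.CSS_SELECTOR, css), (By.XPATH, xpath)):
+#         if not selector:
+#             continue
+#         try:
+#             return wait_driver.until(EC.element_to_be_clickable((by, selector)))
+#         except TimeoutException:
+#             continue
+#     return None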
diff --git a/apps/pre-processing-service/app/test/test_keyword.py b/apps/pre-processing-service/app/test/test_keyword.py
index 2a96796e..82daefc8 100644
--- a/apps/pre-processing-service/app/test/test_keyword.py
+++ b/apps/pre-processing-service/app/test/test_keyword.py
@@ -1,19 +1,10 @@
import pytest
from fastapi.testclient import TestClient
from app.main import app
+from app.utils.response import Response
client = TestClient(app)
-JOB_ID = 1
-SCHEDULE_ID = 1
-SCHEDULE_HIS_ID = 1
-
-
-def test_read_root():
- response = client.get("/keywords/")
- assert response.status_code == 200
- assert response.json() == {"message": "keyword API"}
-
@pytest.mark.parametrize(
"tag, category, start_date, end_date",
@@ -26,9 +17,6 @@ def test_read_root():
)
def test_search(tag, category, start_date, end_date):
body = {
- "job_id": JOB_ID,
- "schedule_id": SCHEDULE_ID,
- "schedule_his_id": SCHEDULE_HIS_ID, # 오타 수정
"tag": tag,
"category": category,
"start_date": start_date,
@@ -39,9 +27,7 @@ def test_search(tag, category, start_date, end_date):
assert response.status_code == 200
response_data = response.json()
- assert response_data["job_id"] == body["job_id"]
- assert response_data["schedule_id"] == body["schedule_id"]
- assert response_data["schedule_his_id"] == body["schedule_his_id"] # 오타 수정
- assert response_data["status"] == "success"
- assert "keyword" in response_data
- assert isinstance(response_data["total_keyword"], dict)
+ assert response_data["success"] == True
+ assert response_data["status"] == "OK"
+ assert "keyword" in response_data["data"]
+ assert isinstance(response_data["data"]["total_keyword"], dict)
diff --git a/apps/pre-processing-service/app/test/test_match_service.py b/apps/pre-processing-service/app/test/test_match_service.py
index 7750cd3d..7deb043c 100644
--- a/apps/pre-processing-service/app/test/test_match_service.py
+++ b/apps/pre-processing-service/app/test/test_match_service.py
@@ -23,9 +23,6 @@ def test_match_success():
]
body = {
- "job_id": 1,
- "schedule_id": 1,
- "schedule_his_id": 1,
"keyword": "반지",
"search_results": sample_search_results,
}
@@ -35,14 +32,14 @@ def test_match_success():
assert response.status_code == 200
data = response.json()
- assert data["job_id"] == body["job_id"]
- assert data["keyword"] == body["keyword"]
- assert data["status"] == "success"
- assert isinstance(data["matched_products"], list)
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["keyword"] == body["keyword"]
+ assert isinstance(data["data"]["matched_products"], list)
# 반지가 포함된 상품들이 매칭되어야 함
- if data["matched_products"]:
- for product in data["matched_products"]:
+ if data["data"]["matched_products"]:
+ for product in data["data"]["matched_products"]:
assert "match_info" in product
assert "match_type" in product["match_info"]
assert "match_score" in product["match_info"]
@@ -51,9 +48,6 @@ def test_match_success():
def test_match_no_results():
"""검색 결과가 없는 경우"""
body = {
- "job_id": 2,
- "schedule_id": 2,
- "schedule_his_id": 2,
"keyword": "반지",
"search_results": [],
}
@@ -63,7 +57,9 @@ def test_match_no_results():
assert response.status_code == 200
data = response.json()
- assert data["matched_products"] == []
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["matched_products"] == []
def test_match_no_matches():
@@ -80,9 +76,6 @@ def test_match_no_matches():
]
body = {
- "job_id": 3,
- "schedule_id": 3,
- "schedule_his_id": 3,
"keyword": "반지",
"search_results": sample_search_results,
}
@@ -93,5 +86,6 @@ def test_match_no_matches():
assert response.status_code == 200
data = response.json()
# 매칭되지 않아도 성공으로 처리
- assert data["status"] == "success"
- assert isinstance(data["matched_products"], list)
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert isinstance(data["data"]["matched_products"], list)
diff --git a/apps/pre-processing-service/app/test/test_sadagu_crawl.py b/apps/pre-processing-service/app/test/test_sadagu_crawl.py
index 6c6ad84a..72e4f0df 100644
--- a/apps/pre-processing-service/app/test/test_sadagu_crawl.py
+++ b/apps/pre-processing-service/app/test/test_sadagu_crawl.py
@@ -7,9 +7,6 @@
def test_crawl_success():
body = {
- "job_id": 1, # 문자열 -> 숫자로 수정
- "schedule_id": 1, # 문자열 -> 숫자로 수정
- "schedule_his_id": 1,
"tag": "detail",
"product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790",
"use_selenium": False,
@@ -21,10 +18,10 @@ def test_crawl_success():
assert response.status_code == 200
data = response.json()
- assert data["job_id"] == body["job_id"]
- assert data["schedule_id"] == body["schedule_id"]
- assert data["product_url"] == body["product_url"]
- assert "product_detail" in data
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["product_url"] == body["product_url"]
+ assert "product_detail" in data["data"]
# def test_crawl_invalid_url():
@@ -39,7 +36,7 @@ def test_crawl_success():
# "include_images": False,
# }
#
-# response = client.post("/products/crawl", json=body)
+# response = client.post("/products/crawlers", json=body)
# print(f"Response: {response.json()}")
#
# assert response.status_code == 200
@@ -62,7 +59,7 @@ def test_crawl_success():
# "include_images": False,
# }
#
-# response = client.post("/products/crawl", json=body)
+# response = client.post("/products/crawlers", json=body)
# print(f"Response: {response.json()}")
#
# assert response.status_code in (400, 422, 500)
@@ -79,7 +76,7 @@ def test_crawl_success():
# "include_images": True,
# }
#
-# response = client.post("/products/crawl", json=body)
+# response = client.post("/products/crawlers", json=body)
# print(f"Response: {response.json()}")
#
# assert response.status_code == 200
diff --git a/apps/pre-processing-service/app/test/test_search_service.py b/apps/pre-processing-service/app/test/test_search_service.py
index fc64c9cd..7ee32252 100644
--- a/apps/pre-processing-service/app/test/test_search_service.py
+++ b/apps/pre-processing-service/app/test/test_search_service.py
@@ -7,22 +7,22 @@
def test_search_success():
"""상품 검색 성공 테스트"""
- body = {"job_id": 1, "schedule_id": 1, "schedule_his_id": 1, "keyword": "반지"}
+ body = {"keyword": "반지"}
response = client.post("/products/search", json=body)
print(f"Search Response: {response.json()}")
assert response.status_code == 200
data = response.json()
- assert data["job_id"] == body["job_id"]
- assert data["keyword"] == body["keyword"]
- assert data["status"] == "success"
- assert isinstance(data["search_results"], list)
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["keyword"] == body["keyword"]
+ assert isinstance(data["data"]["search_results"], list)
def test_search_empty_keyword():
"""빈 키워드 검색 테스트"""
- body = {"job_id": 2, "schedule_id": 2, "schedule_his_id": 2, "keyword": ""}
+ body = {"keyword": ""}
response = client.post("/products/search", json=body)
print(f"Empty keyword response: {response.json()}")
@@ -30,15 +30,14 @@ def test_search_empty_keyword():
# 빈 키워드라도 에러가 아닌 빈 결과를 반환해야 함
assert response.status_code == 200
data = response.json()
- assert data["search_results"] == []
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["search_results"] == []
def test_search_nonexistent_keyword():
"""존재하지 않는 키워드 검색"""
body = {
- "job_id": 3,
- "schedule_id": 3,
- "schedule_his_id": 3,
"keyword": "zxcvbnmasdfghjklqwertyuiop123456789",
}
@@ -48,5 +47,6 @@ def test_search_nonexistent_keyword():
assert response.status_code == 200
data = response.json()
# 검색 결과가 없어도 성공으로 처리
- assert data["status"] == "success"
- assert isinstance(data["search_results"], list)
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert isinstance(data["data"]["search_results"], list)
diff --git a/apps/pre-processing-service/app/test/test_similarity_service.py b/apps/pre-processing-service/app/test/test_similarity_service.py
index cb84d3c3..6efbcdc1 100644
--- a/apps/pre-processing-service/app/test/test_similarity_service.py
+++ b/apps/pre-processing-service/app/test/test_similarity_service.py
@@ -29,9 +29,6 @@ def test_similarity_with_matched_products():
]
body = {
- "job_id": 1,
- "schedule_id": 1,
- "schedule_his_id": 1,
"keyword": "반지",
"matched_products": matched_products,
}
@@ -41,14 +38,14 @@ def test_similarity_with_matched_products():
assert response.status_code == 200
data = response.json()
- assert data["job_id"] == body["job_id"]
- assert data["keyword"] == body["keyword"]
- assert data["status"] == "success"
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["keyword"] == body["keyword"]
- if data["selected_product"]:
- assert "similarity_info" in data["selected_product"]
- assert "similarity_score" in data["selected_product"]["similarity_info"]
- assert data["reason"] is not None
+ if data["data"]["selected_product"]:
+ assert "similarity_info" in data["data"]["selected_product"]
+ assert "similarity_score" in data["data"]["selected_product"]["similarity_info"]
+ assert data["data"]["reason"] is not None
def test_similarity_fallback_to_search_results():
@@ -65,9 +62,6 @@ def test_similarity_fallback_to_search_results():
]
body = {
- "job_id": 2,
- "schedule_id": 2,
- "schedule_his_id": 2,
"keyword": "반지",
"matched_products": [], # 매칭된 상품 없음
"search_results": search_results, # 폴백용
@@ -78,13 +72,14 @@ def test_similarity_fallback_to_search_results():
assert response.status_code == 200
data = response.json()
- assert data["status"] == "success"
+ assert data["success"] == True
+ assert data["status"] == "OK"
# 폴백 모드에서는 임계값을 통과한 경우에만 상품이 선택됨
- if data["selected_product"]:
- assert "similarity_info" in data["selected_product"]
+ if data["data"]["selected_product"]:
+ assert "similarity_info" in data["data"]["selected_product"]
assert (
- data["selected_product"]["similarity_info"]["analysis_mode"]
+ data["data"]["selected_product"]["similarity_info"]["analysis_mode"]
== "fallback_similarity_only"
)
@@ -100,9 +95,6 @@ def test_similarity_single_candidate():
]
body = {
- "job_id": 3,
- "schedule_id": 3,
- "schedule_his_id": 3,
"keyword": "반지",
"matched_products": single_product,
}
@@ -112,9 +104,11 @@ def test_similarity_single_candidate():
assert response.status_code == 200
data = response.json()
- assert data["selected_product"] is not None
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["selected_product"] is not None
assert (
- data["selected_product"]["similarity_info"]["analysis_type"]
+ data["data"]["selected_product"]["similarity_info"]["analysis_type"]
== "single_candidate"
)
@@ -122,9 +116,6 @@ def test_similarity_single_candidate():
def test_similarity_no_candidates():
"""후보가 없는 경우"""
body = {
- "job_id": 4,
- "schedule_id": 4,
- "schedule_his_id": 4,
"keyword": "반지",
"matched_products": [],
"search_results": [],
@@ -135,5 +126,7 @@ def test_similarity_no_candidates():
assert response.status_code == 200
data = response.json()
- assert data["selected_product"] is None
- assert "검색 결과가 모두 없음" in data["reason"]
+ assert data["success"] == True
+ assert data["status"] == "OK"
+ assert data["data"]["selected_product"] is None
+ assert "검색 결과가 모두 없음" in data["data"]["reason"]
diff --git a/apps/pre-processing-service/app/utils/crawling_util.py b/apps/pre-processing-service/app/utils/crawling_util.py
index 8ec47518..5e50528d 100644
--- a/apps/pre-processing-service/app/utils/crawling_util.py
+++ b/apps/pre-processing-service/app/utils/crawling_util.py
@@ -1,59 +1,70 @@
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
+from typing import Optional
class CrawlingUtil:
+ """
+ 공통 Selenium WebDriver 유틸리티
+ 블로그 포스팅과 상품 크롤링 모두 지원
+ """
- def __init__(self):
- self.options = self._get_chrome_options()
- self.driver = None
-
- def _get_chrome_options(self):
+ def __init__(self, headless: bool = True, for_blog_posting: bool = False):
"""
- 크롬 옵션 설정
- 1. 헤드리스 모드 비활성화 (네이버 탐지 우회)
- 2. 샌드박스 비활성화
- 3. GPU 비활성화
- 4. 완전한 사용자 에이전트 설정
- 5. 자동화 탐지 우회 설정
+ :param headless: 헤드리스 모드 사용 여부
+ :param for_blog_posting: 블로그 포스팅용 설정 사용 여부
"""
+ self.headless = headless
+ self.for_blog_posting = for_blog_posting
+ self.options = self._get_chrome_options()
+        self.driver: Optional[webdriver.Chrome] = None
+ def _get_chrome_options(self) -> Options:
+ """크롬 옵션 설정"""
options = Options()
- options.add_argument(
- "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
- )
- # options.add_argument('--headless') 백그라운드 실행시 주석 해제
+ # 기본 설정
options.add_argument("--no-sandbox")
options.add_argument("--disable-dev-shm-usage")
options.add_argument("--disable-gpu")
options.add_argument("--disable-extensions")
- options.add_experimental_option("excludeSwitches", ["enable-automation"])
- options.add_experimental_option("useAutomationExtension", False)
- options.add_argument("--disable-blink-features=AutomationControlled")
- return options
+ # 헤드리스 모드 설정
+ if self.headless:
+ options.add_argument("--headless")
+ options.add_argument("--window-size=1920,1080")
- def get_driver(self):
- """
- 셀레니움 웹 드라이버 반환
- :return: 셀레니움 웹 드라이버
- """
+ # 블로그 포스팅용 특별 설정 (네이버 탐지 우회)
+ if self.for_blog_posting:
+ options.add_argument(
+ "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
+ )
+ options.add_experimental_option("excludeSwitches", ["enable-automation"])
+ options.add_experimental_option("useAutomationExtension", False)
+ options.add_argument("--disable-blink-features=AutomationControlled")
+ else:
+ # 일반 크롤링용 설정
+ options.add_argument(
+ "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+ )
+ return options
+
+ def get_driver(self) -> webdriver.Chrome:
+ """셀레니움 웹 드라이버 반환"""
if self.driver is None:
self.driver = webdriver.Chrome(options=self.options)
-
return self.driver
- def get_wait(self, timeout: int = 15):
- """
- WebDriverWait 객체 반환
- :param timeout: 대기 시간 (초)
- :return: WebDriverWait 객체
- """
-
+ def get_wait(self, timeout: int = 15) -> WebDriverWait:
+ """WebDriverWait 객체 반환"""
if self.driver is None:
self.get_driver()
-
return WebDriverWait(self.driver, timeout)
+
+ def close(self):
+ """드라이버 종료"""
+ if self.driver:
+ self.driver.quit()
+ self.driver = None
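+
+# NOTE: a minimal usage sketch (the URL is illustrative):
+#
+#     util = CrawlingUtil(headless=True)
+#     driver = util.get_driver()
+#     try:
+#         driver.get("https://example.com")
+#     finally:
+#         util.close()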
diff --git a/apps/pre-processing-service/app/utils/llm_extractor.py b/apps/pre-processing-service/app/utils/llm_extractor.py
new file mode 100644
index 00000000..3fb200a5
--- /dev/null
+++ b/apps/pre-processing-service/app/utils/llm_extractor.py
@@ -0,0 +1,253 @@
+import os
+from openai import OpenAI
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+class LLMExtractor:
+
+ def __init__(self, model="gpt-4o"):
+ """
+ LLMExtractor 초기화
+ :param model: 사용할 LLM 모델 이름
+ """
+
+ self.client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+ self.model = model
+
+ def login_extraction_prompt(self, target_description: str, html: str):
+ """
+ 네이버, 티스토리 통합 로그인 프롬프트
+ :param html: 분석할 HTML
+ :param target_description: 추출 대상 설명
+ :return: 프롬프트 문자열
+ """
+
+ return f"""
+ # 지시 (Instructions):
+ 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다.
+ 2. 당신의 임무는 사용자의 목표와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다.
+
+ # 규칙 (Rules):
+ 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다.
+ 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요.
+ 3. name에는 요소의 이름을 나타내도록 지정하세요. 예: id, password, login_button, title, body 등
+        4. 반환되는 형식:
+ {{
+ "found": true/false,
+ "elements": [
+ {{
+ "name_css_selector": "CSS 선택자 문자열",
+ "name_xpath": "XPath 문자열"
+ }},
+ ]
+ }}
+
+ # 수행 (Execution):
+ 사용자의 요구 사항 : {target_description}
+ HTML 문서 : {html}
+
+ """
+
+ def naver_post_extraction_prompt(self, html: str):
+ """
+ 네이버 블로그 포스트 프롬프트
+ :param html: 분석할 HTML
+ :return: 프롬프트 문자열
+ """
+
+ return f"""
+ # 지시 (Instructions):
+ 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다.
+ 2. 당신의 임무는 목표(Goal)와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다.
+
+ # 규칙 (Rules):
+ 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다.
+ 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요.
+
+ # 목표 (Goal):
+
+ ## 제목 입력 영역 찾기:
+ "제목"이 포함된 요소 찾기
+ - HTML에서 "제목"이라는 한글 텍스트를 포함한 모든 요소 검색
+ - 이 요소와 같은 부모나 형제 관계에 있는 요소 찾기
+
+ ## 본문 입력 영역 찾기:
+ "본문"이 포함된 요소 찾기:
+ - HTML에서 "본문"이라는 한글 텍스트를 포함한 모든 요소 검색
+ - 이 요소와 같은 부모나 형제 관계에 있는 요소 찾기
+
+ # 도움말 닫기 버튼 찾기:
+ "도움말"이 포함된 요소 찾기:
+ - "도움말"이라는 한글 텍스트를 포함한 모든 요소 검색
+ - 이 요소와 같은 부모나 형제 관계에 있는 "닫기" 버튼 찾기
+
+ # 첫 번째 발행 버튼(팝업 열기용) 찾기:
+ "발행"이 포함된 버튼 요소 찾기:
+ - HTML에서 "발행"이라는 한글 텍스트를 포함한 모든 버튼
+ - 이 버튼이 팝업을 여는 역할을 하는지 확인
+
+ # 태그 입력 필드 찾기:
+ "tag"가 포함된 요소 찾기:
+ - HTML에서 "tag"라는 단어가 포함된 모든 요소 검색
+ - id나 placeholder에 "tag" or "태그" 관련 내용이 있는 것
+
+ # 최종 발행 버튼 찾기:
+ popup 내부의 발행 버튼 찾기:
+ - popup div 내부에 있는 "발행" 버튼
+ - confirm_btn 클래스가 포함된 버튼
+
+ # 반환 형식:
+ {{
+ "found": true/false,
+ "elements": [
+ {{
+ "title_css_selector": "제목 입력을 위한 요소의 CSS 선택자",
+ "title_xpath": "제목 입력을 위한 요소의 XPath"
+ }},
+ {{
+ "content_css_selector": "본문 입력을 위한 요소의 CSS 선택자",
+ "content_xpath": "본문 입력을 위한 요소의 XPath"
+ }},
+ {{
+ "help_close_css_selector": "도움말 닫기 버튼의 CSS 선택자",
+ "help_close_xpath": "도움말 닫기 버튼의 XPath"
+ }},
+ {{
+ "first_publish_css_selector": "첫 번째 발행 버튼(팝업 열기용)의 CSS 선택자",
+                "first_publish_xpath": "첫 번째 발행 버튼(팝업 열기용)의 XPath"
+ }},
+ {{
+ "tag_input_css_selector": "태그 입력 필드의 CSS 선택자",
+ "tag_input_xpath": "태그 입력 필드의 XPath"
+ }},
+ {{
+ "final_publish_css_selector": "팝업 내의 발행 버튼의 CSS 선택자",
+ "final_publish_xpath": "팝업 내의 발행 버튼의 XPath"
+ }}
+ ]
+ }}
+
+ # 분석할 HTML:
+ {html}
+ """
+
+ def tistory_post_extraction_prompt(self, html: str):
+ """
+ 티스토리 기본 입력 요소들 (제목, 내용, 태그, 완료버튼) 추출 프롬프트
+ :param html: 분석할 HTML
+ :return: 프롬프트 문자열
+ """
+ return f"""
+ # 지시 (Instructions):
+ 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다.
+ 2. 당신의 임무는 목표(Goal)와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다.
+
+ # 규칙 (Rules):
+ 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다.
+ 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요.
+
+ # 목표 (Goal):
+
+ ## 제목 입력 영역 찾기:
+ "제목"이 포함된 요소 찾기
+ - HTML에서 "제목"이라는 한글 텍스트를 포함한 모든 요소 검색
+ - 이 요소와 같은 부모나 형제 관계에 있는 요소 찾기
+
+ ## 글 내용 입력 영역 찾기:
+ "글 내용 입력"이 포함된 요소 찾기:
+ - iframe 내부의 요소 우선 검색
+ - "글 내용 입력"이라는 한글 텍스트를 포함한 요소 검색
+ - contenteditable="true" 속성을 가진 요소 우선 검색
+
+ # "tag" or "태그" 입력 필드 찾기:
+ "tag" or "태그"가 포함된 요소 찾기:
+ - HTML에서 "tag" or "태그"라는 텍스트를 포함한 모든 요소 검색
+ - id나 placeholder에 "tag" or "태그" 관련 내용이 있는 것
+
+ # 완료 버튼 찾기:
+ "완료"가 포함된 버튼 요소 찾기:
+ - HTML에서 정확히 "완료"라는 한글 텍스트를 포함한 모든 버튼
+ - 이 버튼이 글 작성을 완료하는 역할을 하는지 확인
+
+ # 반환 형식:
+ {{
+ "found": true/false,
+ "elements": [
+ {{
+ "title_css_selector": "제목 입력을 위한 요소의 CSS 선택자 또는 null",
+ "title_xpath": "제목 입력을 위한 요소의 XPath 또는 null"
+ }},
+ {{
+ "content_css_selector": "글 내용 입력을 위한 요소의 CSS 선택자 또는 null",
+ "content_xpath": "글 내용 입력을 위한 요소의 XPath 또는 null"
+ }},
+ {{
+ "tag_input_css_selector": "태그 입력 필드의 CSS 선택자 또는 null",
+ "tag_input_xpath": "태그 입력 필드의 XPath 또는 null"
+ }},
+ {{
+ "complete_css_selector": "완료 버튼의 CSS 선택자 또는 null",
+ "complete_xpath": "완료 버튼의 XPath 또는 null"
+ }}
+ ]
+ }}
+
+ # 분석할 HTML:
+ {html}
+ """
+
+ def tistory_publish_extraction_prompt(self, html: str):
+ """
+ 티스토리 발행 관련 요소들 (공개 라디오, 발행 버튼) 추출 프롬프트
+ 완료 버튼 클릭 후 동적으로 생성되는 요소들을 찾기 위한 프롬프트
+ :param html: 분석할 HTML (완료 버튼 클릭 후 업데이트된 HTML)
+ :return: 프롬프트 문자열
+ """
+ return f"""
+ # 지시 (Instructions):
+ 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다.
+ 2. 당신의 임무는 목표(Goal)와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다.
+
+ # 규칙 (Rules):
+ 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다.
+ 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요.
+ 3. CSS 선택자에서 Selenium이 지원하지 않는 문법을 사용하지 마세요:
+ - :contains() 선택자 금지 (jQuery 전용)
+ - :visible, :hidden 같은 jQuery 전용 선택자 금지
+ - 표준 CSS 선택자만 사용 (id, class, attribute, tag 등)
+
+ # 목표 (Goal):
+
+ # 공개 radio 버튼 찾기:
+ "공개"가 포함된 radio 요소 찾기:
+ - input type="radio" 요소 우선 검색
+ - HTML에서 "공개"라는 한글 텍스트를 포함한 모든 radio 버튼
+ - 글의 공개/비공개 설정을 위한 라디오 버튼
+
+ # 발행 버튼 찾기:
+ "발행"이 포함된 버튼 요소 찾기:
+ - HTML에서 "발행"이라는 한글 텍스트를 포함한 모든 버튼
+ - "게시", "Publish" 등의 유사한 텍스트도 포함
+ - publish-btn, btn-publish 등의 id나 class를 가진 버튼 우선 검색
+ - 이 버튼이 최종적으로 글을 발행하는 역할을 하는지 확인
+
+ # 반환 형식:
+ {{
+ "found": true/false,
+ "elements": [
+ {{
+ "public_radio_css_selector": "공개 radio의 CSS 선택자 또는 null",
+ "public_radio_xpath": "공개 radio의 XPath 또는 null"
+ }},
+ {{
+ "publish_css_selector": "발행 버튼의 CSS 선택자 또는 null",
+ "publish_xpath": "발행 버튼의 XPath 또는 null"
+ }}
+ ]
+ }}
+
+ # 분석할 HTML:
+ {html}
+ """
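+
+# NOTE: this class only builds prompts; the actual OpenAI call is assumed to
+# live elsewhere. A minimal sketch using the client initialized in __init__
+# (the method name `extract` is hypothetical):
+#
+#     def extract(self, prompt: str) -> str:
+#         completion = self.client.chat.completions.create(
+#             model=self.model,
+#             messages=[{"role": "user", "content": prompt}],
+#             response_format={"type": "json_object"},
+#         )
+#         return completion.choices[0].message.content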
diff --git a/apps/pre-processing-service/app/utils/preprocess_html.py b/apps/pre-processing-service/app/utils/preprocess_html.py
new file mode 100644
index 00000000..6edfb9d6
--- /dev/null
+++ b/apps/pre-processing-service/app/utils/preprocess_html.py
@@ -0,0 +1,210 @@
+from bs4 import BeautifulSoup, Comment
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+import re
+
+
+def preprocess_html(html_content):
+ """
+ HTML 전처리
+ :param html_content: 원본 HTML 문자열
+ :return: 전처리된 HTML 문자열 리스트
+ """
+ soup = BeautifulSoup(html_content, "html.parser")
+
+ # 불필요한 태그 제거
+ unnecessary_tags = [
+ "script", # JavaScript 코드
+ "style", # CSS 스타일
+ "noscript", # JavaScript 비활성화 시 내용
+ "meta", # 메타데이터
+ "link", # 외부 리소스 링크 (중요한 것 제외)
+ "head", # head 전체
+ "title", # 페이지 제목
+ "base", # base URL
+ ]
+
+ for tag_name in unnecessary_tags:
+ for tag in soup.find_all(tag_name):
+            # rel은 다중값 속성이라 BeautifulSoup이 리스트로 반환하므로 멤버십으로 검사
+            rel = tag.get("rel") or []
+            if tag_name == "link" and ("stylesheet" in rel or "icon" in rel):
+                continue
+ tag.decompose()
+
+ # HTML 주석 제거
+ comments = soup.find_all(string=lambda text: isinstance(text, Comment))
+ for comment in comments:
+ comment.extract()
+
+ # display:none만 제거하고 다른 숨김 요소는 보존
+ hidden_elements = soup.find_all(
+ attrs={"style": re.compile(r"display\s*:\s*none", re.I)}
+ )
+ for element in hidden_elements:
+ element.decompose()
+
+ # 중요한 속성들을 보존
+ important_attributes = {
+ "id",
+ "class",
+ "name",
+ "type",
+ "value",
+ "href",
+ "src",
+ "alt",
+ "title",
+ "placeholder",
+ "role",
+ "aria-label",
+ "aria-describedby",
+ "aria-expanded",
+ "onclick",
+ "onchange",
+ "onfocus",
+ "onblur",
+ "disabled",
+ "readonly",
+ "required",
+ "checked",
+ "selected",
+ "hidden",
+ "tabindex",
+ "contenteditable",
+ "spellcheck",
+ "autocomplete",
+ "maxlength",
+ "minlength",
+ "for",
+ "form",
+ "method",
+ "action",
+ "target",
+ }
+
+ for tag in soup.find_all(True):
+ attrs_to_remove = []
+ for attr_name in tag.attrs.keys():
+ # data-* 속성은 모두 보존
+ if attr_name.startswith("data-"):
+ continue
+ # aria-* 속성도 모두 보존
+ if attr_name.startswith("aria-"):
+ continue
+ # on* 이벤트 속성들도 보존
+ if attr_name.startswith("on"):
+ continue
+ # 중요 속성이 아니면 제거
+ if attr_name not in important_attributes:
+ attrs_to_remove.append(attr_name)
+
+ for attr_name in attrs_to_remove:
+ del tag.attrs[attr_name]
+
+ # 빈 태그 제거
+ interactive_tags = {
+ "input",
+ "button",
+ "select",
+ "textarea",
+ "a",
+ "img",
+ "br",
+ "hr",
+ "div",
+ "span",
+ }
+
+ def remove_empty_tags_conservative():
+ removed_any = True
+ iteration = 0
+ while removed_any and iteration < 3: # 최대 3번만 반복
+ removed_any = False
+ iteration += 1
+
+ for tag in soup.find_all():
+                # 상호작용 가능하거나 구조 유지에 필요한 태그(div, span 포함)는 보존
+ if tag.name in interactive_tags:
+ continue
+
+ # contenteditable 속성이 있으면 보존
+ if tag.get("contenteditable"):
+ continue
+
+ # data-* 속성이 있으면 보존
+ if any(attr.startswith("data-") for attr in tag.attrs.keys()):
+ continue
+
+ # 텍스트도 없고 자식 요소도 없으면 제거
+ if not tag.get_text(strip=True) and not tag.find_all():
+ tag.decompose()
+ removed_any = True
+
+ remove_empty_tags_conservative()
+
+ # 연속된 공백 정리
+ for text_node in soup.find_all(string=True):
+ if text_node.parent.name not in ["script", "style"]:
+ cleaned_text = re.sub(r"\s+", " ", str(text_node))
+ if cleaned_text != str(text_node):
+ text_node.replace_with(cleaned_text)
+
+ html_list = _chunking_html(str(soup))
+ return html_list
+
+
+def _chunking_html(html_content, chunk_size=50000):
+ """
+    HTML을 지정된 크기로 분할하는 함수
+ :param html_content: 원본 HTML 문자열
+ :param chunk_size: 각 청크의 최대 크기 (문자 수)
+ :return: HTML 청크 리스트
+ """
+ chunks = []
+ for i in range(0, len(html_content), chunk_size):
+ chunks.append(html_content[i : i + chunk_size])
+ return chunks
+
+
+def wait_for_tistory_editor_complete(driver, timeout=30):
+ """
+ 티스토리 TinyMCE 에디터가 완전히 로드될 때까지 대기
+ """
+ from selenium.webdriver.support.ui import WebDriverWait
+
+ wait = WebDriverWait(driver, timeout)
+
+ # 페이지 기본 로딩
+ wait.until(lambda d: d.execute_script("return document.readyState") == "complete")
+
+ # TinyMCE 라이브러리 로딩
+ wait.until(lambda d: d.execute_script("return typeof tinymce !== 'undefined'"))
+
+ # 에디터 인스턴스 초기화
+ wait.until(
+ lambda d: d.execute_script(
+ """
+ return tinymce.get('editor-tistory') &&
+ tinymce.get('editor-tistory').initialized
+ """
+ )
+ )
+
+ # iframe 준비
+ wait.until(EC.presence_of_element_located((By.ID, "editor-tistory_ifr")))
+
+ # iframe 내부 document 준비
+ wait.until(
+ lambda d: d.execute_script(
+ """
+ try {
+ var editor = tinymce.get('editor-tistory');
+ var doc = editor.getDoc();
+ return doc && doc.readyState === 'complete';
+ } catch (e) {
+ return false;
+ }
+ """
+ )
+ )
+
+ return True
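+
+# NOTE: a minimal usage sketch, assuming `driver` is a Selenium WebDriver
+# already on the target editor page:
+#
+#     chunks = preprocess_html(driver.page_source)
+#     for chunk in chunks:
+#         ...  # feed each chunk to an LLM extractor
+#
+# _chunking_html splits on raw character count, so a chunk boundary can fall
+# in the middle of a tag; consumers should tolerate truncated fragments.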
diff --git a/apps/pre-processing-service/app/utils/response.py b/apps/pre-processing-service/app/utils/response.py
new file mode 100644
index 00000000..305f080e
--- /dev/null
+++ b/apps/pre-processing-service/app/utils/response.py
@@ -0,0 +1,25 @@
+from typing import Optional
+
+
+class Response:
+ @staticmethod
+ def ok(data: dict, message: str = "OK") -> dict:
+ """성공 응답"""
+ return {"success": True, "data": data, "status": "OK", "message": message}
+
+ @staticmethod
+    def error(message: str = "오류가 발생했습니다", data: Optional[dict] = None) -> dict:
+ """에러 응답"""
+ return {
+ "success": False,
+ "data": data or {},
+ "status": "ERROR",
+ "message": message,
+ }
+
+ @staticmethod
+    def not_found(message: str = "결과를 찾을 수 없습니다", data: Optional[dict] = None) -> dict:
+ """검색 결과 없음"""
+ return {
+ "success": True, # 에러가 아닌 정상 처리
+ "data": data or {},
+ "status": "NOT_FOUND",
+ "message": message,
+ }
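+
+# NOTE: endpoints return these dicts directly; e.g. (illustrative values):
+#
+#     return Response.ok({"keyword": keyword, "search_results": results})
+#     # -> {"success": True, "data": {...}, "status": "OK", "message": "OK"}
+#
+# This is the envelope the updated tests assert against.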
diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py
index a5e855e7..d4e7c0c5 100644
--- a/apps/pre-processing-service/app/utils/similarity_analyzer.py
+++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py
@@ -1,110 +1,93 @@
-# import torch
-# import numpy as np
-# from sklearn.metrics.pairwise import cosine_similarity
-# from transformers import AutoTokenizer, AutoModel
-# from loguru import logger
-#
-#
-# class SimilarityAnalyzer:
-# """텍스트 유사도 분석기"""
-#
-# def __init__(self):
-# try:
-# logger.info("KLUE BERT 모델 로딩 시도 중...")
-# self.tokenizer = AutoTokenizer.from_pretrained("klue/bert-base")
-# self.model = AutoModel.from_pretrained("klue/bert-base")
-# logger.success("KLUE BERT 모델 로딩 성공")
-# except Exception as e:
-# logger.warning(f"KLUE BERT 로딩 실패, 다국어 BERT로 대체: {e}")
-# try:
-# logger.info("다국어 BERT 모델 로딩 시도 중...")
-# self.tokenizer = AutoTokenizer.from_pretrained(
-# "bert-base-multilingual-cased"
-# )
-# self.model = AutoModel.from_pretrained("bert-base-multilingual-cased")
-# logger.success("다국어 BERT 모델 로딩 성공")
-# except Exception as e2:
-# logger.error(f"모든 BERT 모델 로딩 실패: {e2}")
-# raise e2
-#
-# def get_embedding(self, text: str) -> np.ndarray:
-# """텍스트 임베딩 생성"""
-# try:
-# logger.debug(f"임베딩 생성 시작: text='{text[:50]}'")
-# inputs = self.tokenizer(
-# text, return_tensors="pt", padding=True, truncation=True, max_length=128
-# )
-# with torch.no_grad():
-# outputs = self.model(**inputs)
-# embedding = outputs.last_hidden_state[:, 0, :].numpy()
-# logger.debug(f"임베딩 생성 완료: shape={embedding.shape}")
-# return embedding
-# except Exception as e:
-# logger.error(f"임베딩 생성 오류: text='{text[:30]}', error='{e}'")
-# raise
-#
-# def calculate_similarity(self, text1: str, text2: str) -> float:
-# """두 텍스트 간 유사도 계산"""
-# try:
-# logger.debug(
-# f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'"
-# )
-# embedding1 = self.get_embedding(text1)
-# embedding2 = self.get_embedding(text2)
-# similarity = cosine_similarity(embedding1, embedding2)[0][0]
-# logger.debug(f"유사도 계산 완료: similarity={similarity:.4f}")
-# return similarity
-# except Exception as e:
-# logger.error(
-# f"유사도 계산 오류: text1='{text1[:30]}', text2='{text2[:30]}', error='{e}'"
-# )
-# raise
-#
-# def analyze_similarity_batch(
-# self, keyword: str, product_titles: list[str]
-# ) -> list[dict]:
-# """배치로 유사도 분석"""
-# logger.info(
-# f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}"
-# )
-#
-# try:
-# keyword_embedding = self.get_embedding(keyword)
-# results = []
-#
-# for i, title in enumerate(product_titles):
-# try:
-# logger.debug(
-# f"유사도 계산 중 ({i + 1}/{len(product_titles)}): title='{title[:30]}'"
-# )
-# title_embedding = self.get_embedding(title)
-# similarity = cosine_similarity(keyword_embedding, title_embedding)[
-# 0
-# ][0]
-#
-# results.append(
-# {
-# "index": i,
-# "title": title,
-# "similarity": float(similarity),
-# "score": float(similarity),
-# }
-# )
-# logger.debug(
-# f"유사도 계산 완료 ({i + 1}/{len(product_titles)}): similarity={similarity:.4f}"
-# )
-# except Exception as e:
-# logger.error(f"유사도 계산 오류 (제목: {title[:30]}): {e}")
-# results.append(
-# {"index": i, "title": title, "similarity": 0.0, "score": 0.0}
-# )
-#
-# # 유사도 기준 내림차순 정렬
-# results.sort(key=lambda x: x["similarity"], reverse=True)
-# logger.info(
-# f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}"
-# )
-# return results
-# except Exception as e:
-# logger.error(f"배치 유사도 분석 실패: keyword='{keyword}', error='{e}'")
-# raise
+import numpy as np
+from sklearn.metrics.pairwise import cosine_similarity
+from loguru import logger
+import onnxruntime as ort
+from transformers import AutoTokenizer
+
+
+class SimilarityAnalyzerONNX:
+ """ONNX 기반 텍스트 유사도 분석기"""
+
+ def __init__(self, model_path: str = "klue_bert.onnx"):
+ try:
+ logger.info("토크나이저 로딩 중: klue/bert-base")
+ self.tokenizer = AutoTokenizer.from_pretrained("klue/bert-base")
+ logger.info(f"ONNX 모델 로딩 중: {model_path}")
+ self.ort_session = ort.InferenceSession(model_path)
+ logger.success("ONNX 모델 로딩 성공")
+ except Exception as e:
+ logger.error(f"모델 로딩 실패: {e}")
+ raise e
+
+ def get_embedding(self, text: str) -> np.ndarray:
+ """텍스트 임베딩 생성 (ONNX)"""
+ try:
+ logger.debug(f"임베딩 생성 시작: text='{text[:50]}'")
+ # 토큰화
+ inputs = self.tokenizer(
+ text, return_tensors="np", padding=True, truncation=True, max_length=128
+ )
+ ort_inputs = {
+ "input_ids": inputs["input_ids"].astype(np.int64),
+ "attention_mask": inputs["attention_mask"].astype(np.int64),
+ }
+ ort_outs = self.ort_session.run(None, ort_inputs)
+ embedding = ort_outs[0][:, 0, :] # [CLS] 토큰 임베딩
+ logger.debug(f"임베딩 생성 완료: shape={embedding.shape}")
+ return embedding
+ except Exception as e:
+ logger.error(f"임베딩 생성 오류: text='{text[:30]}', error='{e}'")
+ raise
+
+ def calculate_similarity(self, text1: str, text2: str) -> float:
+ """두 텍스트 간 유사도 계산"""
+ try:
+ logger.debug(
+ f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'"
+ )
+ emb1 = self.get_embedding(text1)
+ emb2 = self.get_embedding(text2)
+ similarity = cosine_similarity(emb1, emb2)[0][0]
+ logger.debug(f"유사도 계산 완료: similarity={similarity:.4f}")
+ return similarity
+ except Exception as e:
+ logger.error(f"유사도 계산 오류: {e}")
+ raise
+
+ def analyze_similarity_batch(
+ self, keyword: str, product_titles: list[str]
+ ) -> list[dict]:
+ """배치 유사도 분석"""
+ logger.info(
+ f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}"
+ )
+ try:
+ keyword_emb = self.get_embedding(keyword)
+ results = []
+
+ for i, title in enumerate(product_titles):
+ try:
+ title_emb = self.get_embedding(title)
+ sim = cosine_similarity(keyword_emb, title_emb)[0][0]
+ results.append(
+ {
+ "index": i,
+ "title": title,
+ "similarity": float(sim),
+ "score": float(sim),
+ }
+ )
+ except Exception as e:
+ logger.error(f"유사도 계산 오류 (제목: {title[:30]}): {e}")
+ results.append(
+ {"index": i, "title": title, "similarity": 0.0, "score": 0.0}
+ )
+
+ results.sort(key=lambda x: x["similarity"], reverse=True)
+            # product_titles가 비어 있으면 results[0] 접근 시 IndexError가 나므로 가드
+            if results:
+                logger.info(
+                    f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}"
+                )
+ return results
+ except Exception as e:
+ logger.error(f"배치 유사도 분석 실패: {e}")
+ raise
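+
+# NOTE: "klue_bert.onnx" is assumed to be exported ahead of time. A minimal
+# export sketch with torch.onnx.export; the input names must match the
+# ort_inputs keys used in get_embedding above:
+#
+#     import torch
+#     from transformers import AutoModel, AutoTokenizer
+#
+#     tokenizer = AutoTokenizer.from_pretrained("klue/bert-base")
+#     model = AutoModel.from_pretrained("klue/bert-base", return_dict=False).eval()
+#     sample = tokenizer("예시 문장", return_tensors="pt")
+#     torch.onnx.export(
+#         model,
+#         (sample["input_ids"], sample["attention_mask"]),
+#         "klue_bert.onnx",
+#         input_names=["input_ids", "attention_mask"],
+#         output_names=["last_hidden_state"],
+#         dynamic_axes={
+#             "input_ids": {0: "batch", 1: "seq"},
+#             "attention_mask": {0: "batch", 1: "seq"},
+#         },
+#     )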
diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock
index e65412d3..a1729b8b 100644
--- a/apps/pre-processing-service/poetry.lock
+++ b/apps/pre-processing-service/poetry.lock
@@ -1,4 +1,140 @@
-# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.6.1"
+description = "Happy Eyeballs for asyncio"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"},
+ {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.12.15"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc"},
+ {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af"},
+ {file = "aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421"},
+ {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79"},
+ {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77"},
+ {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c"},
+ {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4"},
+ {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6"},
+ {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2"},
+ {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d"},
+ {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb"},
+ {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5"},
+ {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b"},
+ {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065"},
+ {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1"},
+ {file = "aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a"},
+ {file = "aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830"},
+ {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117"},
+ {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe"},
+ {file = "aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9"},
+ {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5"},
+ {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728"},
+ {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16"},
+ {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0"},
+ {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b"},
+ {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd"},
+ {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8"},
+ {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50"},
+ {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676"},
+ {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7"},
+ {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7"},
+ {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685"},
+ {file = "aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b"},
+ {file = "aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d"},
+ {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7"},
+ {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444"},
+ {file = "aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d"},
+ {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c"},
+ {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0"},
+ {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab"},
+ {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb"},
+ {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545"},
+ {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c"},
+ {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd"},
+ {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f"},
+ {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d"},
+ {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519"},
+ {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea"},
+ {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3"},
+ {file = "aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1"},
+ {file = "aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34"},
+ {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315"},
+ {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd"},
+ {file = "aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4"},
+ {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7"},
+ {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d"},
+ {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b"},
+ {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d"},
+ {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d"},
+ {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645"},
+ {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461"},
+ {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9"},
+ {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d"},
+ {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693"},
+ {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64"},
+ {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51"},
+ {file = "aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0"},
+ {file = "aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84"},
+ {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98"},
+ {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406"},
+ {file = "aiohttp-3.12.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d"},
+ {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf"},
+ {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6"},
+ {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142"},
+ {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89"},
+ {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263"},
+ {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530"},
+ {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75"},
+ {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05"},
+ {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54"},
+ {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02"},
+ {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0"},
+ {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09"},
+ {file = "aiohttp-3.12.15-cp39-cp39-win32.whl", hash = "sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d"},
+ {file = "aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8"},
+ {file = "aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2"},
+]
+
+[package.dependencies]
+aiohappyeyeballs = ">=2.5.0"
+aiosignal = ">=1.4.0"
+attrs = ">=17.3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+propcache = ">=0.2.0"
+yarl = ">=1.17.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""]
+
+[[package]]
+name = "aiosignal"
+version = "1.4.0"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"},
+ {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}
[[package]]
name = "annotated-types"
@@ -438,6 +574,24 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+[[package]]
+name = "coloredlogs"
+version = "15.0.1"
+description = "Colored terminal output for Python's logging module"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+groups = ["main"]
+files = [
+ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
+ {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
+]
+
+[package.dependencies]
+humanfriendly = ">=9.1"
+
+[package.extras]
+cron = ["capturer (>=2.4)"]
+
[[package]]
name = "dbutils"
version = "3.1.2"
@@ -455,21 +609,33 @@ docs = ["docutils"]
pg = ["PyGreSQL (>=5)"]
tests = ["pytest (>=7)", "ruff"]
+[[package]]
+name = "distro"
+version = "1.9.0"
+description = "Distro - an OS platform information API"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
+ {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
+]
+
[[package]]
name = "fastapi"
-version = "0.116.1"
+version = "0.116.2"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
- {file = "fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565"},
- {file = "fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143"},
+ {file = "fastapi-0.116.2-py3-none-any.whl", hash = "sha256:c3a7a8fb830b05f7e087d920e0d786ca1fc9892eb4e9a84b227be4c1bc7569db"},
+ {file = "fastapi-0.116.2.tar.gz", hash = "sha256:231a6af2fe21cfa2c32730170ad8514985fc250bec16c9b242d3b94c835ef529"},
]
[package.dependencies]
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
-starlette = ">=0.40.0,<0.48.0"
+starlette = ">=0.40.0,<0.49.0"
typing-extensions = ">=4.8.0"
[package.extras]
@@ -477,6 +643,184 @@ all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>
standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
+[[package]]
+name = "filelock"
+version = "3.19.1"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d"},
+ {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"},
+]
+
+[[package]]
+name = "flatbuffers"
+version = "25.2.10"
+description = "The FlatBuffers serialization format for Python"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051"},
+ {file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"},
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.7.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"},
+ {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"},
+ {file = "frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"},
+ {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"},
+ {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"},
+ {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"},
+ {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"},
+ {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"},
+ {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"},
+ {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"},
+ {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"},
+ {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"},
+ {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"},
+ {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"},
+ {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"},
+ {file = "frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"},
+ {file = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"},
+ {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"},
+ {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"},
+ {file = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"},
+ {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"},
+ {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"},
+ {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"},
+ {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"},
+ {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"},
+ {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"},
+ {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"},
+ {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"},
+ {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"},
+ {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"},
+ {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"},
+ {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"},
+ {file = "frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"},
+ {file = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"},
+ {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2"},
+ {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb"},
+ {file = "frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478"},
+ {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8"},
+ {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08"},
+ {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4"},
+ {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b"},
+ {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e"},
+ {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca"},
+ {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df"},
+ {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5"},
+ {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025"},
+ {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01"},
+ {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08"},
+ {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"},
+ {file = "frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"},
+ {file = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"},
+ {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee"},
+ {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d"},
+ {file = "frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43"},
+ {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d"},
+ {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee"},
+ {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb"},
+ {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f"},
+ {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60"},
+ {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00"},
+ {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b"},
+ {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c"},
+ {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949"},
+ {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca"},
+ {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b"},
+ {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e"},
+ {file = "frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1"},
+ {file = "frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81"},
+ {file = "frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e"},
+ {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"},
+ {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"},
+ {file = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"},
+ {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"},
+ {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"},
+ {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"},
+ {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"},
+ {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"},
+ {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"},
+ {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"},
+ {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"},
+ {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"},
+ {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"},
+ {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"},
+ {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"},
+ {file = "frozenlist-1.7.0-cp39-cp39-win32.whl", hash = "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"},
+ {file = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"},
+ {file = "frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"},
+ {file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"},
+]
+
+[[package]]
+name = "fsspec"
+version = "2025.9.0"
+description = "File-system specification"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "fsspec-2025.9.0-py3-none-any.whl", hash = "sha256:530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7"},
+ {file = "fsspec-2025.9.0.tar.gz", hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19"},
+]
+
+[package.extras]
+abfs = ["adlfs"]
+adl = ["adlfs"]
+arrow = ["pyarrow (>=1)"]
+dask = ["dask", "distributed"]
+dev = ["pre-commit", "ruff (>=0.5)"]
+doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
+dropbox = ["dropbox", "dropboxdrivefs", "requests"]
+full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
+fuse = ["fusepy"]
+gcs = ["gcsfs"]
+git = ["pygit2"]
+github = ["requests"]
+gs = ["gcsfs"]
+gui = ["panel"]
+hdfs = ["pyarrow (>=1)"]
+http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
+libarchive = ["libarchive-c"]
+oci = ["ocifs"]
+s3 = ["s3fs"]
+sftp = ["paramiko"]
+smb = ["smbprotocol"]
+ssh = ["paramiko"]
+test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
+test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
+test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""]
+tqdm = ["tqdm"]
+
[[package]]
name = "google"
version = "3.0.0"
@@ -522,14 +866,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"]
[[package]]
name = "google-api-python-client"
-version = "2.181.0"
+version = "2.182.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
- {file = "google_api_python_client-2.181.0-py3-none-any.whl", hash = "sha256:348730e3ece46434a01415f3d516d7a0885c8e624ce799f50f2d4d86c2475fb7"},
- {file = "google_api_python_client-2.181.0.tar.gz", hash = "sha256:d7060962a274a16a2c6f8fb4b1569324dbff11bfbca8eb050b88ead1dd32261c"},
+ {file = "google_api_python_client-2.182.0-py3-none-any.whl", hash = "sha256:a9b071036d41a17991d8fbf27bedb61f2888a39ae5696cb5a326bf999b2d5209"},
+ {file = "google_api_python_client-2.182.0.tar.gz", hash = "sha256:cb2aa127e33c3a31e89a06f39cf9de982db90a98dee020911b21013afafad35f"},
]
[package.dependencies]
@@ -722,6 +1066,28 @@ files = [
{file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
]
+[[package]]
+name = "hf-xet"
+version = "1.1.10"
+description = "Fast transfer of large files with the Hugging Face Hub."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""
+files = [
+ {file = "hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d"},
+ {file = "hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b"},
+ {file = "hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435"},
+ {file = "hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c"},
+ {file = "hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06"},
+ {file = "hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f"},
+ {file = "hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045"},
+ {file = "hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97"},
+]
+
+[package.extras]
+tests = ["pytest"]
+
[[package]]
name = "httpcore"
version = "1.0.9"
@@ -746,14 +1112,14 @@ trio = ["trio (>=0.22.0,<1.0)"]
[[package]]
name = "httplib2"
-version = "0.30.2"
+version = "0.31.0"
description = "A comprehensive HTTP client library."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
- {file = "httplib2-0.30.2-py3-none-any.whl", hash = "sha256:62a665905c1f1d1069c34f933787d2a4435c67c0bc2b323645dcfbb64661b5ec"},
- {file = "httplib2-0.30.2.tar.gz", hash = "sha256:050bde6a332824b05a3deef5238f2b0372f71af46f8ca2190c2cb1f66aa376cd"},
+ {file = "httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24"},
+ {file = "httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c"},
]
[package.dependencies]
@@ -784,6 +1150,60 @@ http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
zstd = ["zstandard (>=0.18.0)"]
+[[package]]
+name = "huggingface-hub"
+version = "0.35.0"
+description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
+optional = false
+python-versions = ">=3.8.0"
+groups = ["main"]
+files = [
+ {file = "huggingface_hub-0.35.0-py3-none-any.whl", hash = "sha256:f2e2f693bca9a26530b1c0b9bcd4c1495644dad698e6a0060f90e22e772c31e9"},
+ {file = "huggingface_hub-0.35.0.tar.gz", hash = "sha256:ccadd2a78eef75effff184ad89401413629fabc52cefd76f6bbacb9b1c0676ac"},
+]
+
+[package.dependencies]
+filelock = "*"
+fsspec = ">=2023.5.0"
+hf-xet = {version = ">=1.1.3,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""}
+packaging = ">=20.9"
+pyyaml = ">=5.1"
+requests = "*"
+tqdm = ">=4.42.1"
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
+cli = ["InquirerPy (==0.3.4)"]
+dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
+fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
+hf-transfer = ["hf-transfer (>=0.1.4)"]
+hf-xet = ["hf-xet (>=1.1.2,<2.0.0)"]
+inference = ["aiohttp"]
+mcp = ["aiohttp", "mcp (>=1.8.0)", "typer"]
+oauth = ["authlib (>=1.3.2)", "fastapi", "httpx", "itsdangerous"]
+quality = ["libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "ruff (>=0.9.0)", "ty"]
+tensorflow = ["graphviz", "pydot", "tensorflow"]
+tensorflow-testing = ["keras (<3.0)", "tensorflow"]
+testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
+torch = ["safetensors[torch]", "torch"]
+typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"]
+
+[[package]]
+name = "humanfriendly"
+version = "10.0"
+description = "Human friendly output for text interfaces using Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+groups = ["main"]
+files = [
+ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
+ {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
+]
+
+[package.dependencies]
+pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""}
+
[[package]]
name = "idna"
version = "3.10"
@@ -811,6 +1231,94 @@ files = [
{file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
]
+[[package]]
+name = "jiter"
+version = "0.11.0"
+description = "Fast iterable JSON parser."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "jiter-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3893ce831e1c0094a83eeaf56c635a167d6fa8cc14393cc14298fd6fdc2a2449"},
+ {file = "jiter-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:25c625b9b61b5a8725267fdf867ef2e51b429687f6a4eef211f4612e95607179"},
+ {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4ca85fb6a62cf72e1c7f5e34ddef1b660ce4ed0886ec94a1ef9777d35eaa1f"},
+ {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:572208127034725e79c28437b82414028c3562335f2b4f451d98136d0fc5f9cd"},
+ {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494ba627c7f550ad3dabb21862864b8f2216098dc18ff62f37b37796f2f7c325"},
+ {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8da18a99f58bca3ecc2d2bba99cac000a924e115b6c4f0a2b98f752b6fbf39a"},
+ {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ffd3b0fff3fabbb02cc09910c08144db6bb5697a98d227a074401e01ee63dd"},
+ {file = "jiter-0.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fe6530aa738a4f7d4e4702aa8f9581425d04036a5f9e25af65ebe1f708f23be"},
+ {file = "jiter-0.11.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e35d66681c133a03d7e974e7eedae89720fe8ca3bd09f01a4909b86a8adf31f5"},
+ {file = "jiter-0.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c59459beca2fbc9718b6f1acb7bfb59ebc3eb4294fa4d40e9cb679dafdcc6c60"},
+ {file = "jiter-0.11.0-cp310-cp310-win32.whl", hash = "sha256:b7b0178417b0dcfc5f259edbc6db2b1f5896093ed9035ee7bab0f2be8854726d"},
+ {file = "jiter-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:11df2bf99fb4754abddd7f5d940a48e51f9d11624d6313ca4314145fcad347f0"},
+ {file = "jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222"},
+ {file = "jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d"},
+ {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7"},
+ {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d"},
+ {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09"},
+ {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789"},
+ {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347"},
+ {file = "jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648"},
+ {file = "jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4"},
+ {file = "jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1"},
+ {file = "jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982"},
+ {file = "jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7"},
+ {file = "jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada"},
+ {file = "jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99"},
+ {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6"},
+ {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1"},
+ {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4"},
+ {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72"},
+ {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591"},
+ {file = "jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09"},
+ {file = "jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5"},
+ {file = "jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206"},
+ {file = "jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b"},
+ {file = "jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c"},
+ {file = "jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb"},
+ {file = "jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471"},
+ {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd"},
+ {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921"},
+ {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df"},
+ {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982"},
+ {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64"},
+ {file = "jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1"},
+ {file = "jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758"},
+ {file = "jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166"},
+ {file = "jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80"},
+ {file = "jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6"},
+ {file = "jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33"},
+ {file = "jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03"},
+ {file = "jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba"},
+ {file = "jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72"},
+ {file = "jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774"},
+ {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0"},
+ {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a"},
+ {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773"},
+ {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7"},
+ {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2"},
+ {file = "jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2"},
+ {file = "jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0"},
+ {file = "jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73"},
+ {file = "jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2"},
+ {file = "jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40"},
+ {file = "jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406"},
+ {file = "jiter-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:719891c2fb7628a41adff4f2f54c19380a27e6fdfdb743c24680ef1a54c67bd0"},
+ {file = "jiter-0.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df7f1927cbdf34cb91262a5418ca06920fd42f1cf733936d863aeb29b45a14ef"},
+ {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e71ae6d969d0c9bab336c5e9e2fabad31e74d823f19e3604eaf96d9a97f463df"},
+ {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5661469a7b2be25ade3a4bb6c21ffd1e142e13351a0759f264dfdd3ad99af1ab"},
+ {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76c15ef0d3d02f8b389066fa4c410a0b89e9cc6468a1f0674c5925d2f3c3e890"},
+ {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63782a1350917a27817030716566ed3d5b3c731500fd42d483cbd7094e2c5b25"},
+ {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a7092b699646a1ddc03a7b112622d9c066172627c7382659befb0d2996f1659"},
+ {file = "jiter-0.11.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f637b8e818f6d75540f350a6011ce21252573c0998ea1b4365ee54b7672c23c5"},
+ {file = "jiter-0.11.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a624d87719e1b5d09c15286eaee7e1532a40c692a096ea7ca791121365f548c1"},
+ {file = "jiter-0.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9d0146d8d9b3995821bb586fc8256636258947c2f39da5bab709f3a28fb1a0b"},
+ {file = "jiter-0.11.0-cp39-cp39-win32.whl", hash = "sha256:d067655a7cf0831eb8ec3e39cbd752995e9b69a2206df3535b3a067fac23b032"},
+ {file = "jiter-0.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:f05d03775a11aaf132c447436983169958439f1219069abf24662a672851f94e"},
+ {file = "jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7"},
+ {file = "jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4"},
+]
+
[[package]]
name = "joblib"
version = "1.5.2"
@@ -895,6 +1403,144 @@ files = [
unidic = ["unidic"]
unidic-lite = ["unidic-lite"]
+[[package]]
+name = "mpmath"
+version = "1.3.0"
+description = "Python library for arbitrary-precision floating-point arithmetic"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
+ {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
+]
+
+[package.extras]
+develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
+docs = ["sphinx"]
+gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""]
+tests = ["pytest (>=4.6)"]
+
+[[package]]
+name = "multidict"
+version = "6.6.4"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f"},
+ {file = "multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb"},
+ {file = "multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495"},
+ {file = "multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8"},
+ {file = "multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7"},
+ {file = "multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796"},
+ {file = "multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db"},
+ {file = "multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0"},
+ {file = "multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877"},
+ {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace"},
+ {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6"},
+ {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb"},
+ {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb"},
+ {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987"},
+ {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f"},
+ {file = "multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f"},
+ {file = "multidict-6.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0"},
+ {file = "multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729"},
+ {file = "multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c"},
+ {file = "multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb"},
+ {file = "multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e"},
+ {file = "multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded"},
+ {file = "multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683"},
+ {file = "multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a"},
+ {file = "multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9"},
+ {file = "multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50"},
+ {file = "multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52"},
+ {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6"},
+ {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e"},
+ {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3"},
+ {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c"},
+ {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b"},
+ {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f"},
+ {file = "multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2"},
+ {file = "multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e"},
+ {file = "multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf"},
+ {file = "multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8"},
+ {file = "multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3"},
+ {file = "multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b"},
+ {file = "multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287"},
+ {file = "multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138"},
+ {file = "multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6"},
+ {file = "multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9"},
+ {file = "multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c"},
+ {file = "multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402"},
+ {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7"},
+ {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f"},
+ {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d"},
+ {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7"},
+ {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802"},
+ {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24"},
+ {file = "multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793"},
+ {file = "multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e"},
+ {file = "multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364"},
+ {file = "multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e"},
+ {file = "multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657"},
+ {file = "multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da"},
+ {file = "multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa"},
+ {file = "multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f"},
+ {file = "multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0"},
+ {file = "multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879"},
+ {file = "multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a"},
+ {file = "multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f"},
+ {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5"},
+ {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438"},
+ {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e"},
+ {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7"},
+ {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812"},
+ {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a"},
+ {file = "multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69"},
+ {file = "multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf"},
+ {file = "multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605"},
+ {file = "multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb"},
+ {file = "multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e"},
+ {file = "multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f"},
+ {file = "multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773"},
+ {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e"},
+ {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0"},
+ {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395"},
+ {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45"},
+ {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb"},
+ {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5"},
+ {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141"},
+ {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d"},
+ {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d"},
+ {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0"},
+ {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92"},
+ {file = "multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e"},
+ {file = "multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4"},
+ {file = "multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad"},
+ {file = "multidict-6.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:af7618b591bae552b40dbb6f93f5518328a949dac626ee75927bba1ecdeea9f4"},
+ {file = "multidict-6.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b6819f83aef06f560cb15482d619d0e623ce9bf155115150a85ab11b8342a665"},
+ {file = "multidict-6.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d09384e75788861e046330308e7af54dd306aaf20eb760eb1d0de26b2bea2cb"},
+ {file = "multidict-6.6.4-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:a59c63061f1a07b861c004e53869eb1211ffd1a4acbca330e3322efa6dd02978"},
+ {file = "multidict-6.6.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350f6b0fe1ced61e778037fdc7613f4051c8baf64b1ee19371b42a3acdb016a0"},
+ {file = "multidict-6.6.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c5cbac6b55ad69cb6aa17ee9343dfbba903118fd530348c330211dc7aa756d1"},
+ {file = "multidict-6.6.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:630f70c32b8066ddfd920350bc236225814ad94dfa493fe1910ee17fe4365cbb"},
+ {file = "multidict-6.6.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8d4916a81697faec6cb724a273bd5457e4c6c43d82b29f9dc02c5542fd21fc9"},
+ {file = "multidict-6.6.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e42332cf8276bb7645d310cdecca93a16920256a5b01bebf747365f86a1675b"},
+ {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f3be27440f7644ab9a13a6fc86f09cdd90b347c3c5e30c6d6d860de822d7cb53"},
+ {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:21f216669109e02ef3e2415ede07f4f8987f00de8cdfa0cc0b3440d42534f9f0"},
+ {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d9890d68c45d1aeac5178ded1d1cccf3bc8d7accf1f976f79bf63099fb16e4bd"},
+ {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:edfdcae97cdc5d1a89477c436b61f472c4d40971774ac4729c613b4b133163cb"},
+ {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0b2e886624be5773e69cf32bcb8534aecdeb38943520b240fed3d5596a430f2f"},
+ {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:be5bf4b3224948032a845d12ab0f69f208293742df96dc14c4ff9b09e508fc17"},
+ {file = "multidict-6.6.4-cp39-cp39-win32.whl", hash = "sha256:10a68a9191f284fe9d501fef4efe93226e74df92ce7a24e301371293bd4918ae"},
+ {file = "multidict-6.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee25f82f53262f9ac93bd7e58e47ea1bdcc3393cef815847e397cba17e284210"},
+ {file = "multidict-6.6.4-cp39-cp39-win_arm64.whl", hash = "sha256:f9867e55590e0855bcec60d4f9a092b69476db64573c9fe17e92b0c50614c16a"},
+ {file = "multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c"},
+ {file = "multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd"},
+]
+
[[package]]
name = "mypy-extensions"
version = "1.1.0"
@@ -1008,6 +1654,70 @@ rsa = ["cryptography (>=3.0.0)"]
signals = ["blinker (>=1.4.0)"]
signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
+[[package]]
+name = "onnxruntime"
+version = "1.22.1"
+description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "onnxruntime-1.22.1-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:80e7f51da1f5201c1379b8d6ef6170505cd800e40da216290f5e06be01aadf95"},
+ {file = "onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89ddfdbbdaf7e3a59515dee657f6515601d55cb21a0f0f48c81aefc54ff1b73"},
+ {file = "onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bddc75868bcf6f9ed76858a632f65f7b1846bdcefc6d637b1e359c2c68609964"},
+ {file = "onnxruntime-1.22.1-cp310-cp310-win_amd64.whl", hash = "sha256:01e2f21b2793eb0c8642d2be3cee34cc7d96b85f45f6615e4e220424158877ce"},
+ {file = "onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0"},
+ {file = "onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5"},
+ {file = "onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04"},
+ {file = "onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03"},
+ {file = "onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a"},
+ {file = "onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928"},
+ {file = "onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d"},
+ {file = "onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87"},
+ {file = "onnxruntime-1.22.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:d29c7d87b6cbed8fecfd09dca471832384d12a69e1ab873e5effbb94adc3e966"},
+ {file = "onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:460487d83b7056ba98f1f7bac80287224c31d8149b15712b0d6f5078fcc33d0f"},
+ {file = "onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0c37070268ba4e02a1a9d28560cd00cd1e94f0d4f275cbef283854f861a65fa"},
+ {file = "onnxruntime-1.22.1-cp313-cp313-win_amd64.whl", hash = "sha256:70980d729145a36a05f74b573435531f55ef9503bcda81fc6c3d6b9306199982"},
+ {file = "onnxruntime-1.22.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33a7980bbc4b7f446bac26c3785652fe8730ed02617d765399e89ac7d44e0f7d"},
+ {file = "onnxruntime-1.22.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7e823624b015ea879d976cbef8bfaed2f7e2cc233d7506860a76dd37f8f381"},
+]
+
+[package.dependencies]
+coloredlogs = "*"
+flatbuffers = "*"
+numpy = ">=1.21.6"
+packaging = "*"
+protobuf = "*"
+sympy = "*"
+
+[[package]]
+name = "openai"
+version = "1.107.3"
+description = "The official Python library for the openai API"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "openai-1.107.3-py3-none-any.whl", hash = "sha256:4ca54a847235ac04c6320da70fdc06b62d71439de9ec0aa40d5690c3064d4025"},
+ {file = "openai-1.107.3.tar.gz", hash = "sha256:69bb8032b05c5f00f7660e422f70f9aabc94793b9a30c5f899360ed21e46314f"},
+]
+
+[package.dependencies]
+anyio = ">=3.5.0,<5"
+distro = ">=1.7.0,<2"
+httpx = ">=0.23.0,<1"
+jiter = ">=0.4.0,<1"
+pydantic = ">=1.9.0,<3"
+sniffio = "*"
+tqdm = ">4"
+typing-extensions = ">=4.11,<5"
+
+[package.extras]
+aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"]
+datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
+realtime = ["websockets (>=13,<16)"]
+voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
+
[[package]]
name = "outcome"
version = "1.3.0.post0"
@@ -1082,14 +1792,122 @@ testing = ["coverage", "pytest", "pytest-benchmark"]

[[package]]
name = "poetry-core"
-version = "2.1.3"
+version = "2.2.0"
description = "Poetry PEP 517 Build Backend"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
- {file = "poetry_core-2.1.3-py3-none-any.whl", hash = "sha256:2c704f05016698a54ca1d327f46ce2426d72eaca6ff614132c8477c292266771"},
- {file = "poetry_core-2.1.3.tar.gz", hash = "sha256:0522a015477ed622c89aad56a477a57813cace0c8e7ff2a2906b7ef4a2e296a4"},
+ {file = "poetry_core-2.2.0-py3-none-any.whl", hash = "sha256:0edea81d07e88cbd407369eef753c722da8ff1338f554788dc04636e756318fc"},
+ {file = "poetry_core-2.2.0.tar.gz", hash = "sha256:b4033b71b99717a942030e074fec7e3082e5fde7a8ed10f02cd2413bdf940b1f"},
+]
+
+[[package]]
+name = "propcache"
+version = "0.3.2"
+description = "Accelerated property cache"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"},
+ {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"},
+ {file = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"},
+ {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"},
+ {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"},
+ {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"},
+ {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"},
+ {file = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"},
+ {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"},
+ {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"},
+ {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"},
+ {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"},
+ {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"},
+ {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"},
+ {file = "propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"},
+ {file = "propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"},
+ {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"},
+ {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"},
+ {file = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"},
+ {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"},
+ {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"},
+ {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"},
+ {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"},
+ {file = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"},
+ {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"},
+ {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"},
+ {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"},
+ {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"},
+ {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"},
+ {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"},
+ {file = "propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"},
+ {file = "propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"},
+ {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10"},
+ {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154"},
+ {file = "propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615"},
+ {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db"},
+ {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1"},
+ {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c"},
+ {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67"},
+ {file = "propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b"},
+ {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8"},
+ {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251"},
+ {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474"},
+ {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535"},
+ {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06"},
+ {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"},
+ {file = "propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"},
+ {file = "propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"},
+ {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945"},
+ {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252"},
+ {file = "propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f"},
+ {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33"},
+ {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e"},
+ {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1"},
+ {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3"},
+ {file = "propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1"},
+ {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6"},
+ {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387"},
+ {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4"},
+ {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88"},
+ {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206"},
+ {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43"},
+ {file = "propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02"},
+ {file = "propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05"},
+ {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b"},
+ {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0"},
+ {file = "propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e"},
+ {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28"},
+ {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a"},
+ {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c"},
+ {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725"},
+ {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892"},
+ {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44"},
+ {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe"},
+ {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81"},
+ {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba"},
+ {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770"},
+ {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330"},
+ {file = "propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394"},
+ {file = "propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198"},
+ {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"},
+ {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"},
+ {file = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"},
+ {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"},
+ {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"},
+ {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"},
+ {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"},
+ {file = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"},
+ {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"},
+ {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"},
+ {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"},
+ {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"},
+ {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"},
+ {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"},
+ {file = "propcache-0.3.2-cp39-cp39-win32.whl", hash = "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"},
+ {file = "propcache-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"},
+ {file = "propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"},
+ {file = "propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"},
]

[[package]]
@@ -1112,21 +1930,21 @@ testing = ["google-api-core (>=1.31.5)"]

[[package]]
name = "protobuf"
-version = "6.32.0"
+version = "6.32.1"
description = ""
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741"},
- {file = "protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e"},
- {file = "protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0"},
- {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1"},
- {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c"},
- {file = "protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb"},
- {file = "protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3"},
- {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"},
- {file = "protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"},
+ {file = "protobuf-6.32.1-cp310-abi3-win32.whl", hash = "sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085"},
+ {file = "protobuf-6.32.1-cp310-abi3-win_amd64.whl", hash = "sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1"},
+ {file = "protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281"},
+ {file = "protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4"},
+ {file = "protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710"},
+ {file = "protobuf-6.32.1-cp39-cp39-win32.whl", hash = "sha256:68ff170bac18c8178f130d1ccb94700cf72852298e016a2443bdb9502279e5f1"},
+ {file = "protobuf-6.32.1-cp39-cp39-win_amd64.whl", hash = "sha256:d0975d0b2f3e6957111aa3935d08a0eb7e006b1505d825f862a1fffc8348e122"},
+ {file = "protobuf-6.32.1-py3-none-any.whl", hash = "sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346"},
+ {file = "protobuf-6.32.1.tar.gz", hash = "sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d"},
]

[[package]]
@@ -1249,14 +2067,14 @@ files = [

[[package]]
name = "pydantic"
-version = "2.11.7"
+version = "2.11.9"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"},
- {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"},
+ {file = "pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2"},
+ {file = "pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2"},
]

[package.dependencies]
@@ -1438,14 +2256,14 @@ rsa = ["cryptography"]

[[package]]
name = "pyparsing"
-version = "3.2.3"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+version = "3.2.4"
+description = "pyparsing - Classes and methods to define and execute parsing grammars"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"},
- {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"},
+ {file = "pyparsing-3.2.4-py3-none-any.whl", hash = "sha256:91d0fcde680d42cd031daf3a6ba20da3107e08a75de50da58360e7d94ab24d36"},
+ {file = "pyparsing-3.2.4.tar.gz", hash = "sha256:fff89494f45559d0f2ce46613b419f632bbb6afbdaed49696d322bcf98a58e99"},
]

[package.extras]
@@ -1462,6 +2280,22 @@ files = [
{file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"},
]

+[[package]]
+name = "pyreadline3"
+version = "3.5.4"
+description = "A python implementation of GNU readline."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"},
+ {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"},
+]
+
+[package.extras]
+dev = ["build", "flake8", "mypy", "pytest", "twine"]
+
[[package]]
name = "pysocks"
version = "1.7.1"
@@ -1512,6 +2346,166 @@ files = [
[package.extras]
cli = ["click (>=5.0)"]

+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+ {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
+]
+
+[[package]]
+name = "regex"
+version = "2025.9.1"
+description = "Alternative regular expression module, to replace re."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "regex-2025.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5aa2a6a73bf218515484b36a0d20c6ad9dc63f6339ff6224147b0e2c095ee55"},
+ {file = "regex-2025.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c2ff5c01d5e47ad5fc9d31bcd61e78c2fa0068ed00cab86b7320214446da766"},
+ {file = "regex-2025.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d49dc84e796b666181de8a9973284cad6616335f01b52bf099643253094920fc"},
+ {file = "regex-2025.9.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9914fe1040874f83c15fcea86d94ea54091b0666eab330aaab69e30d106aabe"},
+ {file = "regex-2025.9.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e71bceb3947362ec5eabd2ca0870bb78eae4edfc60c6c21495133c01b6cd2df4"},
+ {file = "regex-2025.9.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:67a74456f410fe5e869239ee7a5423510fe5121549af133809d9591a8075893f"},
+ {file = "regex-2025.9.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c3b96ed0223b32dbdc53a83149b6de7ca3acd5acd9c8e64b42a166228abe29c"},
+ {file = "regex-2025.9.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:113d5aa950f428faf46fd77d452df62ebb4cc6531cb619f6cc30a369d326bfbd"},
+ {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcdeb38de4f7f3d69d798f4f371189061446792a84e7c92b50054c87aae9c07c"},
+ {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4bcdff370509164b67a6c8ec23c9fb40797b72a014766fdc159bb809bd74f7d8"},
+ {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:7383efdf6e8e8c61d85e00cfb2e2e18da1a621b8bfb4b0f1c2747db57b942b8f"},
+ {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ec2bd3bdf0f73f7e9f48dca550ba7d973692d5e5e9a90ac42cc5f16c4432d8b"},
+ {file = "regex-2025.9.1-cp310-cp310-win32.whl", hash = "sha256:9627e887116c4e9c0986d5c3b4f52bcfe3df09850b704f62ec3cbf177a0ae374"},
+ {file = "regex-2025.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:94533e32dc0065eca43912ee6649c90ea0681d59f56d43c45b5bcda9a740b3dd"},
+ {file = "regex-2025.9.1-cp310-cp310-win_arm64.whl", hash = "sha256:a874a61bb580d48642ffd338570ee24ab13fa023779190513fcacad104a6e251"},
+ {file = "regex-2025.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e5bcf112b09bfd3646e4db6bf2e598534a17d502b0c01ea6550ba4eca780c5e6"},
+ {file = "regex-2025.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:67a0295a3c31d675a9ee0238d20238ff10a9a2fdb7a1323c798fc7029578b15c"},
+ {file = "regex-2025.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea8267fbadc7d4bd7c1301a50e85c2ff0de293ff9452a1a9f8d82c6cafe38179"},
+ {file = "regex-2025.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6aeff21de7214d15e928fb5ce757f9495214367ba62875100d4c18d293750cc1"},
+ {file = "regex-2025.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d89f1bbbbbc0885e1c230f7770d5e98f4f00b0ee85688c871d10df8b184a6323"},
+ {file = "regex-2025.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca3affe8ddea498ba9d294ab05f5f2d3b5ad5d515bc0d4a9016dd592a03afe52"},
+ {file = "regex-2025.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91892a7a9f0a980e4c2c85dd19bc14de2b219a3a8867c4b5664b9f972dcc0c78"},
+ {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e1cb40406f4ae862710615f9f636c1e030fd6e6abe0e0f65f6a695a2721440c6"},
+ {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94f6cff6f7e2149c7e6499a6ecd4695379eeda8ccbccb9726e8149f2fe382e92"},
+ {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6c0226fb322b82709e78c49cc33484206647f8a39954d7e9de1567f5399becd0"},
+ {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a12f59c7c380b4fcf7516e9cbb126f95b7a9518902bcf4a852423ff1dcd03e6a"},
+ {file = "regex-2025.9.1-cp311-cp311-win32.whl", hash = "sha256:49865e78d147a7a4f143064488da5d549be6bfc3f2579e5044cac61f5c92edd4"},
+ {file = "regex-2025.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d34b901f6f2f02ef60f4ad3855d3a02378c65b094efc4b80388a3aeb700a5de7"},
+ {file = "regex-2025.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:47d7c2dab7e0b95b95fd580087b6ae196039d62306a592fa4e162e49004b6299"},
+ {file = "regex-2025.9.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:84a25164bd8dcfa9f11c53f561ae9766e506e580b70279d05a7946510bdd6f6a"},
+ {file = "regex-2025.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:645e88a73861c64c1af558dd12294fb4e67b5c1eae0096a60d7d8a2143a611c7"},
+ {file = "regex-2025.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10a450cba5cd5409526ee1d4449f42aad38dd83ac6948cbd6d7f71ca7018f7db"},
+ {file = "regex-2025.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9dc5991592933a4192c166eeb67b29d9234f9c86344481173d1bc52f73a7104"},
+ {file = "regex-2025.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a32291add816961aab472f4fad344c92871a2ee33c6c219b6598e98c1f0108f2"},
+ {file = "regex-2025.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:588c161a68a383478e27442a678e3b197b13c5ba51dbba40c1ccb8c4c7bee9e9"},
+ {file = "regex-2025.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47829ffaf652f30d579534da9085fe30c171fa2a6744a93d52ef7195dc38218b"},
+ {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e978e5a35b293ea43f140c92a3269b6ab13fe0a2bf8a881f7ac740f5a6ade85"},
+ {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf09903e72411f4bf3ac1eddd624ecfd423f14b2e4bf1c8b547b72f248b7bf7"},
+ {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d016b0f77be63e49613c9e26aaf4a242f196cd3d7a4f15898f5f0ab55c9b24d2"},
+ {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:656563e620de6908cd1c9d4f7b9e0777e3341ca7db9d4383bcaa44709c90281e"},
+ {file = "regex-2025.9.1-cp312-cp312-win32.whl", hash = "sha256:df33f4ef07b68f7ab637b1dbd70accbf42ef0021c201660656601e8a9835de45"},
+ {file = "regex-2025.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:5aba22dfbc60cda7c0853516104724dc904caa2db55f2c3e6e984eb858d3edf3"},
+ {file = "regex-2025.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:ec1efb4c25e1849c2685fa95da44bfde1b28c62d356f9c8d861d4dad89ed56e9"},
+ {file = "regex-2025.9.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bc6834727d1b98d710a63e6c823edf6ffbf5792eba35d3fa119531349d4142ef"},
+ {file = "regex-2025.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c3dc05b6d579875719bccc5f3037b4dc80433d64e94681a0061845bd8863c025"},
+ {file = "regex-2025.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22213527df4c985ec4a729b055a8306272d41d2f45908d7bacb79be0fa7a75ad"},
+ {file = "regex-2025.9.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e3f6e3c5a5a1adc3f7ea1b5aec89abfc2f4fbfba55dafb4343cd1d084f715b2"},
+ {file = "regex-2025.9.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bcb89c02a0d6c2bec9b0bb2d8c78782699afe8434493bfa6b4021cc51503f249"},
+ {file = "regex-2025.9.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b0e2f95413eb0c651cd1516a670036315b91b71767af83bc8525350d4375ccba"},
+ {file = "regex-2025.9.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a41dc039e1c97d3c2ed3e26523f748e58c4de3ea7a31f95e1cf9ff973fff5a"},
+ {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f0b4258b161094f66857a26ee938d3fe7b8a5063861e44571215c44fbf0e5df"},
+ {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bf70e18ac390e6977ea7e56f921768002cb0fa359c4199606c7219854ae332e0"},
+ {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b84036511e1d2bb0a4ff1aec26951caa2dea8772b223c9e8a19ed8885b32dbac"},
+ {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c2e05dcdfe224047f2a59e70408274c325d019aad96227ab959403ba7d58d2d7"},
+ {file = "regex-2025.9.1-cp313-cp313-win32.whl", hash = "sha256:3b9a62107a7441b81ca98261808fed30ae36ba06c8b7ee435308806bd53c1ed8"},
+ {file = "regex-2025.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:b38afecc10c177eb34cfae68d669d5161880849ba70c05cbfbe409f08cc939d7"},
+ {file = "regex-2025.9.1-cp313-cp313-win_arm64.whl", hash = "sha256:ec329890ad5e7ed9fc292858554d28d58d56bf62cf964faf0aa57964b21155a0"},
+ {file = "regex-2025.9.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:72fb7a016467d364546f22b5ae86c45680a4e0de6b2a6f67441d22172ff641f1"},
+ {file = "regex-2025.9.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c9527fa74eba53f98ad86be2ba003b3ebe97e94b6eb2b916b31b5f055622ef03"},
+ {file = "regex-2025.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c905d925d194c83a63f92422af7544ec188301451b292c8b487f0543726107ca"},
+ {file = "regex-2025.9.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74df7c74a63adcad314426b1f4ea6054a5ab25d05b0244f0c07ff9ce640fa597"},
+ {file = "regex-2025.9.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4f6e935e98ea48c7a2e8be44494de337b57a204470e7f9c9c42f912c414cd6f5"},
+ {file = "regex-2025.9.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4a62d033cd9ebefc7c5e466731a508dfabee827d80b13f455de68a50d3c2543d"},
+ {file = "regex-2025.9.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef971ebf2b93bdc88d8337238be4dfb851cc97ed6808eb04870ef67589415171"},
+ {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d936a1db208bdca0eca1f2bb2c1ba1d8370b226785c1e6db76e32a228ffd0ad5"},
+ {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:7e786d9e4469698fc63815b8de08a89165a0aa851720eb99f5e0ea9d51dd2b6a"},
+ {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:6b81d7dbc5466ad2c57ce3a0ddb717858fe1a29535c8866f8514d785fdb9fc5b"},
+ {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cd4890e184a6feb0ef195338a6ce68906a8903a0f2eb7e0ab727dbc0a3156273"},
+ {file = "regex-2025.9.1-cp314-cp314-win32.whl", hash = "sha256:34679a86230e46164c9e0396b56cab13c0505972343880b9e705083cc5b8ec86"},
+ {file = "regex-2025.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:a1196e530a6bfa5f4bde029ac5b0295a6ecfaaffbfffede4bbaf4061d9455b70"},
+ {file = "regex-2025.9.1-cp314-cp314-win_arm64.whl", hash = "sha256:f46d525934871ea772930e997d577d48c6983e50f206ff7b66d4ac5f8941e993"},
+ {file = "regex-2025.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a13d20007dce3c4b00af5d84f6c191ed1c0f70928c6d9b6cd7b8d2f125df7f46"},
+ {file = "regex-2025.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d6b046b0a01cb713fd53ef36cb59db4b0062b343db28e83b52ac6aa01ee5b368"},
+ {file = "regex-2025.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0fa9a7477288717f42dbd02ff5d13057549e9a8cdb81f224c313154cc10bab52"},
+ {file = "regex-2025.9.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2b3ad150c6bc01a8cd5030040675060e2adbe6cbc50aadc4da42c6d32ec266e"},
+ {file = "regex-2025.9.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:aa88d5a82dfe80deaf04e8c39c8b0ad166d5d527097eb9431cb932c44bf88715"},
+ {file = "regex-2025.9.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6f1dae2cf6c2dbc6fd2526653692c144721b3cf3f769d2a3c3aa44d0f38b9a58"},
+ {file = "regex-2025.9.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff62a3022914fc19adaa76b65e03cf62bc67ea16326cbbeb170d280710a7d719"},
+ {file = "regex-2025.9.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a34ef82216189d823bc82f614d1031cb0b919abef27cecfd7b07d1e9a8bdeeb4"},
+ {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d40e6b49daae9ebbd7fa4e600697372cba85b826592408600068e83a3c47211"},
+ {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0aeb0fe80331059c152a002142699a89bf3e44352aee28261315df0c9874759b"},
+ {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a90014d29cb3098403d82a879105d1418edbbdf948540297435ea6e377023ea7"},
+ {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6ff623271e0b0cc5a95b802666bbd70f17ddd641582d65b10fb260cc0c003529"},
+ {file = "regex-2025.9.1-cp39-cp39-win32.whl", hash = "sha256:d161bfdeabe236290adfd8c7588da7f835d67e9e7bf2945f1e9e120622839ba6"},
+ {file = "regex-2025.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:43ebc77a7dfe36661192afd8d7df5e8be81ec32d2ad0c65b536f66ebfec3dece"},
+ {file = "regex-2025.9.1-cp39-cp39-win_arm64.whl", hash = "sha256:5d74b557cf5554001a869cda60b9a619be307df4d10155894aeaad3ee67c9899"},
+ {file = "regex-2025.9.1.tar.gz", hash = "sha256:88ac07b38d20b54d79e704e38aa3bd2c0f8027432164226bdee201a1c0c9c9ff"},
+]
+
[[package]]
name = "requests"
version = "2.32.5"
@@ -1568,6 +2562,45 @@ files = [
[package.dependencies]
pyasn1 = ">=0.1.3"

+[[package]]
+name = "safetensors"
+version = "0.6.2"
+description = ""
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "safetensors-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:9c85ede8ec58f120bad982ec47746981e210492a6db876882aa021446af8ffba"},
+ {file = "safetensors-0.6.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d6675cf4b39c98dbd7d940598028f3742e0375a6b4d4277e76beb0c35f4b843b"},
+ {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d2d2b3ce1e2509c68932ca03ab8f20570920cd9754b05063d4368ee52833ecd"},
+ {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:93de35a18f46b0f5a6a1f9e26d91b442094f2df02e9fd7acf224cfec4238821a"},
+ {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89a89b505f335640f9120fac65ddeb83e40f1fd081cb8ed88b505bdccec8d0a1"},
+ {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4d0d0b937e04bdf2ae6f70cd3ad51328635fe0e6214aa1fc811f3b576b3bda"},
+ {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8045db2c872db8f4cbe3faa0495932d89c38c899c603f21e9b6486951a5ecb8f"},
+ {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:81e67e8bab9878bb568cffbc5f5e655adb38d2418351dc0859ccac158f753e19"},
+ {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0e4d029ab0a0e0e4fdf142b194514695b1d7d3735503ba700cf36d0fc7136ce"},
+ {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:fa48268185c52bfe8771e46325a1e21d317207bcabcb72e65c6e28e9ffeb29c7"},
+ {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:d83c20c12c2d2f465997c51b7ecb00e407e5f94d7dec3ea0cc11d86f60d3fde5"},
+ {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d944cea65fad0ead848b6ec2c37cc0b197194bec228f8020054742190e9312ac"},
+ {file = "safetensors-0.6.2-cp38-abi3-win32.whl", hash = "sha256:cab75ca7c064d3911411461151cb69380c9225798a20e712b102edda2542ddb1"},
+ {file = "safetensors-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:c7b214870df923cbc1593c3faee16bec59ea462758699bd3fee399d00aac072c"},
+ {file = "safetensors-0.6.2.tar.gz", hash = "sha256:43ff2aa0e6fa2dc3ea5524ac7ad93a9839256b8703761e76e2d0b2a3fa4f15d9"},
+]
+
+[package.extras]
+all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"]
+dev = ["safetensors[all]"]
+jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"]
+mlx = ["mlx (>=0.0.9)"]
+numpy = ["numpy (>=1.21.6)"]
+paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"]
+pinned-tf = ["safetensors[numpy]", "tensorflow (==2.18.0)"]
+quality = ["ruff"]
+tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"]
+testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"]
+testingfree = ["huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"]
+torch = ["safetensors[numpy]", "torch (>=1.10)"]
+
[[package]]
name = "scikit-learn"
version = "1.7.2"
@@ -1626,67 +2659,73 @@ tests = ["matplotlib (>=3.5.0)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas

[[package]]
name = "scipy"
-version = "1.16.1"
+version = "1.16.2"
description = "Fundamental algorithms for scientific computing in Python"
optional = false
python-versions = ">=3.11"
groups = ["main"]
files = [
- {file = "scipy-1.16.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c033fa32bab91dc98ca59d0cf23bb876454e2bb02cbe592d5023138778f70030"},
- {file = "scipy-1.16.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6e5c2f74e5df33479b5cd4e97a9104c511518fbd979aa9b8f6aec18b2e9ecae7"},
- {file = "scipy-1.16.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0a55ffe0ba0f59666e90951971a884d1ff6f4ec3275a48f472cfb64175570f77"},
- {file = "scipy-1.16.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f8a5d6cd147acecc2603fbd382fed6c46f474cccfcf69ea32582e033fb54dcfe"},
- {file = "scipy-1.16.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb18899127278058bcc09e7b9966d41a5a43740b5bb8dcba401bd983f82e885b"},
- {file = "scipy-1.16.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adccd93a2fa937a27aae826d33e3bfa5edf9aa672376a4852d23a7cd67a2e5b7"},
- {file = "scipy-1.16.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:18aca1646a29ee9a0625a1be5637fa798d4d81fdf426481f06d69af828f16958"},
- {file = "scipy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d85495cef541729a70cdddbbf3e6b903421bc1af3e8e3a9a72a06751f33b7c39"},
- {file = "scipy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:226652fca853008119c03a8ce71ffe1b3f6d2844cc1686e8f9806edafae68596"},
- {file = "scipy-1.16.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81b433bbeaf35728dad619afc002db9b189e45eebe2cd676effe1fb93fef2b9c"},
- {file = "scipy-1.16.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:886cc81fdb4c6903a3bb0464047c25a6d1016fef77bb97949817d0c0d79f9e04"},
- {file = "scipy-1.16.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:15240c3aac087a522b4eaedb09f0ad061753c5eebf1ea430859e5bf8640d5919"},
- {file = "scipy-1.16.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:65f81a25805f3659b48126b5053d9e823d3215e4a63730b5e1671852a1705921"},
- {file = "scipy-1.16.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6c62eea7f607f122069b9bad3f99489ddca1a5173bef8a0c75555d7488b6f725"},
- {file = "scipy-1.16.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f965bbf3235b01c776115ab18f092a95aa74c271a52577bcb0563e85738fd618"},
- {file = "scipy-1.16.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f006e323874ffd0b0b816d8c6a8e7f9a73d55ab3b8c3f72b752b226d0e3ac83d"},
- {file = "scipy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8fd15fc5085ab4cca74cb91fe0a4263b1f32e4420761ddae531ad60934c2119"},
- {file = "scipy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:f7b8013c6c066609577d910d1a2a077021727af07b6fab0ee22c2f901f22352a"},
- {file = "scipy-1.16.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5451606823a5e73dfa621a89948096c6528e2896e40b39248295d3a0138d594f"},
- {file = "scipy-1.16.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:89728678c5ca5abd610aee148c199ac1afb16e19844401ca97d43dc548a354eb"},
- {file = "scipy-1.16.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e756d688cb03fd07de0fffad475649b03cb89bee696c98ce508b17c11a03f95c"},
- {file = "scipy-1.16.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5aa2687b9935da3ed89c5dbed5234576589dd28d0bf7cd237501ccfbdf1ad608"},
- {file = "scipy-1.16.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0851f6a1e537fe9399f35986897e395a1aa61c574b178c0d456be5b1a0f5ca1f"},
- {file = "scipy-1.16.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fedc2cbd1baed37474b1924c331b97bdff611d762c196fac1a9b71e67b813b1b"},
- {file = "scipy-1.16.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2ef500e72f9623a6735769e4b93e9dcb158d40752cdbb077f305487e3e2d1f45"},
- {file = "scipy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:978d8311674b05a8f7ff2ea6c6bce5d8b45a0cb09d4c5793e0318f448613ea65"},
- {file = "scipy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:81929ed0fa7a5713fcdd8b2e6f73697d3b4c4816d090dd34ff937c20fa90e8ab"},
- {file = "scipy-1.16.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:bcc12db731858abda693cecdb3bdc9e6d4bd200213f49d224fe22df82687bdd6"},
- {file = "scipy-1.16.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:744d977daa4becb9fc59135e75c069f8d301a87d64f88f1e602a9ecf51e77b27"},
- {file = "scipy-1.16.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:dc54f76ac18073bcecffb98d93f03ed6b81a92ef91b5d3b135dcc81d55a724c7"},
- {file = "scipy-1.16.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:367d567ee9fc1e9e2047d31f39d9d6a7a04e0710c86e701e053f237d14a9b4f6"},
- {file = "scipy-1.16.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4cf5785e44e19dcd32a0e4807555e1e9a9b8d475c6afff3d21c3c543a6aa84f4"},
- {file = "scipy-1.16.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3d0b80fb26d3e13a794c71d4b837e2a589d839fd574a6bbb4ee1288c213ad4a3"},
- {file = "scipy-1.16.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8503517c44c18d1030d666cb70aaac1cc8913608816e06742498833b128488b7"},
- {file = "scipy-1.16.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:30cc4bb81c41831ecfd6dc450baf48ffd80ef5aed0f5cf3ea775740e80f16ecc"},
- {file = "scipy-1.16.1-cp313-cp313t-win_amd64.whl", hash = "sha256:c24fa02f7ed23ae514460a22c57eca8f530dbfa50b1cfdbf4f37c05b5309cc39"},
- {file = "scipy-1.16.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:796a5a9ad36fa3a782375db8f4241ab02a091308eb079746bc0f874c9b998318"},
- {file = "scipy-1.16.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:3ea0733a2ff73fd6fdc5fecca54ee9b459f4d74f00b99aced7d9a3adb43fb1cc"},
- {file = "scipy-1.16.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:85764fb15a2ad994e708258bb4ed8290d1305c62a4e1ef07c414356a24fcfbf8"},
- {file = "scipy-1.16.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:ca66d980469cb623b1759bdd6e9fd97d4e33a9fad5b33771ced24d0cb24df67e"},
- {file = "scipy-1.16.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e7cc1ffcc230f568549fc56670bcf3df1884c30bd652c5da8138199c8c76dae0"},
- {file = "scipy-1.16.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ddfb1e8d0b540cb4ee9c53fc3dea3186f97711248fb94b4142a1b27178d8b4b"},
- {file = "scipy-1.16.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4dc0e7be79e95d8ba3435d193e0d8ce372f47f774cffd882f88ea4e1e1ddc731"},
- {file = "scipy-1.16.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f23634f9e5adb51b2a77766dac217063e764337fbc816aa8ad9aaebcd4397fd3"},
- {file = "scipy-1.16.1-cp314-cp314-win_amd64.whl", hash = "sha256:57d75524cb1c5a374958a2eae3d84e1929bb971204cc9d52213fb8589183fc19"},
- {file = "scipy-1.16.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:d8da7c3dd67bcd93f15618938f43ed0995982eb38973023d46d4646c4283ad65"},
- {file = "scipy-1.16.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:cc1d2f2fd48ba1e0620554fe5bc44d3e8f5d4185c8c109c7fbdf5af2792cfad2"},
- {file = "scipy-1.16.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:21a611ced9275cb861bacadbada0b8c0623bc00b05b09eb97f23b370fc2ae56d"},
- {file = "scipy-1.16.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dfbb25dffc4c3dd9371d8ab456ca81beeaf6f9e1c2119f179392f0dc1ab7695"},
- {file = "scipy-1.16.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f0ebb7204f063fad87fc0a0e4ff4a2ff40b2a226e4ba1b7e34bf4b79bf97cd86"},
- {file = "scipy-1.16.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f1b9e5962656f2734c2b285a8745358ecb4e4efbadd00208c80a389227ec61ff"},
- {file = "scipy-1.16.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e1a106f8c023d57a2a903e771228bf5c5b27b5d692088f457acacd3b54511e4"},
- {file = "scipy-1.16.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:709559a1db68a9abc3b2c8672c4badf1614f3b440b3ab326d86a5c0491eafae3"},
- {file = "scipy-1.16.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c0c804d60492a0aad7f5b2bb1862f4548b990049e27e828391ff2bf6f7199998"},
- {file = "scipy-1.16.1.tar.gz", hash = "sha256:44c76f9e8b6e8e488a586190ab38016e4ed2f8a038af7cd3defa903c0a2238b3"},
+ {file = "scipy-1.16.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6ab88ea43a57da1af33292ebd04b417e8e2eaf9d5aa05700be8d6e1b6501cd92"},
+ {file = "scipy-1.16.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c95e96c7305c96ede73a7389f46ccd6c659c4da5ef1b2789466baeaed3622b6e"},
+ {file = "scipy-1.16.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:87eb178db04ece7c698220d523c170125dbffebb7af0345e66c3554f6f60c173"},
+ {file = "scipy-1.16.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:4e409eac067dcee96a57fbcf424c13f428037827ec7ee3cb671ff525ca4fc34d"},
+ {file = "scipy-1.16.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e574be127bb760f0dad24ff6e217c80213d153058372362ccb9555a10fc5e8d2"},
+ {file = "scipy-1.16.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f5db5ba6188d698ba7abab982ad6973265b74bb40a1efe1821b58c87f73892b9"},
+ {file = "scipy-1.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec6e74c4e884104ae006d34110677bfe0098203a3fec2f3faf349f4cb05165e3"},
+ {file = "scipy-1.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:912f46667d2d3834bc3d57361f854226475f695eb08c08a904aadb1c936b6a88"},
+ {file = "scipy-1.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:91e9e8a37befa5a69e9cacbe0bcb79ae5afb4a0b130fd6db6ee6cc0d491695fa"},
+ {file = "scipy-1.16.2-cp311-cp311-win_arm64.whl", hash = "sha256:f3bf75a6dcecab62afde4d1f973f1692be013110cad5338007927db8da73249c"},
+ {file = "scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:89d6c100fa5c48472047632e06f0876b3c4931aac1f4291afc81a3644316bb0d"},
+ {file = "scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ca748936cd579d3f01928b30a17dc474550b01272d8046e3e1ee593f23620371"},
+ {file = "scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:fac4f8ce2ddb40e2e3d0f7ec36d2a1e7f92559a2471e59aec37bd8d9de01fec0"},
+ {file = "scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:033570f1dcefd79547a88e18bccacff025c8c647a330381064f561d43b821232"},
+ {file = "scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ea3421209bf00c8a5ef2227de496601087d8f638a2363ee09af059bd70976dc1"},
+ {file = "scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f66bd07ba6f84cd4a380b41d1bf3c59ea488b590a2ff96744845163309ee8e2f"},
+ {file = "scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e9feab931bd2aea4a23388c962df6468af3d808ddf2d40f94a81c5dc38f32ef"},
+ {file = "scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03dfc75e52f72cf23ec2ced468645321407faad8f0fe7b1f5b49264adbc29cb1"},
+ {file = "scipy-1.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:0ce54e07bbb394b417457409a64fd015be623f36e330ac49306433ffe04bc97e"},
+ {file = "scipy-1.16.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a8ffaa4ac0df81a0b94577b18ee079f13fecdb924df3328fc44a7dc5ac46851"},
+ {file = "scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:84f7bf944b43e20b8a894f5fe593976926744f6c185bacfcbdfbb62736b5cc70"},
+ {file = "scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5c39026d12edc826a1ef2ad35ad1e6d7f087f934bb868fc43fa3049c8b8508f9"},
+ {file = "scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e52729ffd45b68777c5319560014d6fd251294200625d9d70fd8626516fc49f5"},
+ {file = "scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:024dd4a118cccec09ca3209b7e8e614931a6ffb804b2a601839499cb88bdf925"},
+ {file = "scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a5dc7ee9c33019973a470556081b0fd3c9f4c44019191039f9769183141a4d9"},
+ {file = "scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c2275ff105e508942f99d4e3bc56b6ef5e4b3c0af970386ca56b777608ce95b7"},
+ {file = "scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af80196eaa84f033e48444d2e0786ec47d328ba00c71e4299b602235ffef9acb"},
+ {file = "scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9fb1eb735fe3d6ed1f89918224e3385fbf6f9e23757cacc35f9c78d3b712dd6e"},
+ {file = "scipy-1.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:fda714cf45ba43c9d3bae8f2585c777f64e3f89a2e073b668b32ede412d8f52c"},
+ {file = "scipy-1.16.2-cp313-cp313-win_arm64.whl", hash = "sha256:2f5350da923ccfd0b00e07c3e5cfb316c1c0d6c1d864c07a72d092e9f20db104"},
+ {file = "scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:53d8d2ee29b925344c13bda64ab51785f016b1b9617849dac10897f0701b20c1"},
+ {file = "scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:9e05e33657efb4c6a9d23bd8300101536abd99c85cca82da0bffff8d8764d08a"},
+ {file = "scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:7fe65b36036357003b3ef9d37547abeefaa353b237e989c21027b8ed62b12d4f"},
+ {file = "scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6406d2ac6d40b861cccf57f49592f9779071655e9f75cd4f977fa0bdd09cb2e4"},
+ {file = "scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff4dc42bd321991fbf611c23fc35912d690f731c9914bf3af8f417e64aca0f21"},
+ {file = "scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:654324826654d4d9133e10675325708fb954bc84dae6e9ad0a52e75c6b1a01d7"},
+ {file = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63870a84cd15c44e65220eaed2dac0e8f8b26bbb991456a033c1d9abfe8a94f8"},
+ {file = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:fa01f0f6a3050fa6a9771a95d5faccc8e2f5a92b4a2e5440a0fa7264a2398472"},
+ {file = "scipy-1.16.2-cp313-cp313t-win_amd64.whl", hash = "sha256:116296e89fba96f76353a8579820c2512f6e55835d3fad7780fece04367de351"},
+ {file = "scipy-1.16.2-cp313-cp313t-win_arm64.whl", hash = "sha256:98e22834650be81d42982360382b43b17f7ba95e0e6993e2a4f5b9ad9283a94d"},
+ {file = "scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:567e77755019bb7461513c87f02bb73fb65b11f049aaaa8ca17cfaa5a5c45d77"},
+ {file = "scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:17d9bb346194e8967296621208fcdfd39b55498ef7d2f376884d5ac47cec1a70"},
+ {file = "scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:0a17541827a9b78b777d33b623a6dcfe2ef4a25806204d08ead0768f4e529a88"},
+ {file = "scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:d7d4c6ba016ffc0f9568d012f5f1eb77ddd99412aea121e6fa8b4c3b7cbad91f"},
+ {file = "scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9702c4c023227785c779cba2e1d6f7635dbb5b2e0936cdd3a4ecb98d78fd41eb"},
+ {file = "scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1cdf0ac28948d225decdefcc45ad7dd91716c29ab56ef32f8e0d50657dffcc7"},
+ {file = "scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:70327d6aa572a17c2941cdfb20673f82e536e91850a2e4cb0c5b858b690e1548"},
+ {file = "scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5221c0b2a4b58aa7c4ed0387d360fd90ee9086d383bb34d9f2789fafddc8a936"},
+ {file = "scipy-1.16.2-cp314-cp314-win_amd64.whl", hash = "sha256:f5a85d7b2b708025af08f060a496dd261055b617d776fc05a1a1cc69e09fe9ff"},
+ {file = "scipy-1.16.2-cp314-cp314-win_arm64.whl", hash = "sha256:2cc73a33305b4b24556957d5857d6253ce1e2dcd67fa0ff46d87d1670b3e1e1d"},
+ {file = "scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:9ea2a3fed83065d77367775d689401a703d0f697420719ee10c0780bcab594d8"},
+ {file = "scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7280d926f11ca945c3ef92ba960fa924e1465f8d07ce3a9923080363390624c4"},
+ {file = "scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:8afae1756f6a1fe04636407ef7dbece33d826a5d462b74f3d0eb82deabefd831"},
+ {file = "scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:5c66511f29aa8d233388e7416a3f20d5cae7a2744d5cee2ecd38c081f4e861b3"},
+ {file = "scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efe6305aeaa0e96b0ccca5ff647a43737d9a092064a3894e46c414db84bc54ac"},
+ {file = "scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f3a337d9ae06a1e8d655ee9d8ecb835ea5ddcdcbd8d23012afa055ab014f374"},
+ {file = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bab3605795d269067d8ce78a910220262711b753de8913d3deeaedb5dded3bb6"},
+ {file = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b0348d8ddb55be2a844c518cd8cc8deeeb8aeba707cf834db5758fc89b476a2c"},
+ {file = "scipy-1.16.2-cp314-cp314t-win_amd64.whl", hash = "sha256:26284797e38b8a75e14ea6631d29bda11e76ceaa6ddb6fdebbfe4c4d90faf2f9"},
+ {file = "scipy-1.16.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779"},
+ {file = "scipy-1.16.2.tar.gz", hash = "sha256:af029b153d243a80afb6eabe40b0a07f8e35c9adc269c019f364ad747f826a6b"},
]
[package.dependencies]
@@ -1695,7 +2734,7 @@ numpy = ">=1.25.2,<2.6"
[package.extras]
dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"]
doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"]
-test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest (>=8.0.0)", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
[[package]]
name = "selenium"
@@ -1851,14 +2890,14 @@ sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "starlette"
-version = "0.47.3"
+version = "0.48.0"
description = "The little ASGI library that shines."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"},
- {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"},
+ {file = "starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659"},
+ {file = "starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46"},
]
[package.dependencies]
@@ -1868,6 +2907,24 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""
[package.extras]
full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]
+[[package]]
+name = "sympy"
+version = "1.14.0"
+description = "Computer algebra system (CAS) in Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"},
+ {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"},
+]
+
+[package.dependencies]
+mpmath = ">=1.1.0,<1.4"
+
+[package.extras]
+dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"]
+
[[package]]
name = "threadpoolctl"
version = "3.6.0"
@@ -1880,6 +2937,136 @@ files = [
{file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"},
]
+[[package]]
+name = "tokenizers"
+version = "0.22.0"
+description = ""
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "tokenizers-0.22.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:eaa9620122a3fb99b943f864af95ed14c8dfc0f47afa3b404ac8c16b3f2bb484"},
+ {file = "tokenizers-0.22.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:71784b9ab5bf0ff3075bceeb198149d2c5e068549c0d18fe32d06ba0deb63f79"},
+ {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec5b71f668a8076802b0241a42387d48289f25435b86b769ae1837cad4172a17"},
+ {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ea8562fa7498850d02a16178105b58803ea825b50dc9094d60549a7ed63654bb"},
+ {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4136e1558a9ef2e2f1de1555dcd573e1cbc4a320c1a06c4107a3d46dc8ac6e4b"},
+ {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf5954de3962a5fd9781dc12048d24a1a6f1f5df038c6e95db328cd22964206"},
+ {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8337ca75d0731fc4860e6204cc24bb36a67d9736142aa06ed320943b50b1e7ed"},
+ {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a89264e26f63c449d8cded9061adea7b5de53ba2346fc7e87311f7e4117c1cc8"},
+ {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:790bad50a1b59d4c21592f9c3cf5e5cf9c3c7ce7e1a23a739f13e01fb1be377a"},
+ {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:76cf6757c73a10ef10bf06fa937c0ec7393d90432f543f49adc8cab3fb6f26cb"},
+ {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:1626cb186e143720c62c6c6b5371e62bbc10af60481388c0da89bc903f37ea0c"},
+ {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:da589a61cbfea18ae267723d6b029b84598dc8ca78db9951d8f5beff72d8507c"},
+ {file = "tokenizers-0.22.0-cp39-abi3-win32.whl", hash = "sha256:dbf9d6851bddae3e046fedfb166f47743c1c7bd11c640f0691dd35ef0bcad3be"},
+ {file = "tokenizers-0.22.0-cp39-abi3-win_amd64.whl", hash = "sha256:c78174859eeaee96021f248a56c801e36bfb6bd5b067f2e95aa82445ca324f00"},
+ {file = "tokenizers-0.22.0.tar.gz", hash = "sha256:2e33b98525be8453f355927f3cab312c36cd3e44f4d7e9e97da2fa94d0a49dcb"},
+]
+
+[package.dependencies]
+huggingface-hub = ">=0.16.4,<1.0"
+
+[package.extras]
+dev = ["tokenizers[testing]"]
+docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
+testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+description = "Fast, Extensible Progress Meter"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
+ {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"]
+discord = ["requests"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
+[[package]]
+name = "transformers"
+version = "4.56.1"
+description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow"
+optional = false
+python-versions = ">=3.9.0"
+groups = ["main"]
+files = [
+ {file = "transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248"},
+ {file = "transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74"},
+]
+
+[package.dependencies]
+filelock = "*"
+huggingface-hub = ">=0.34.0,<1.0"
+numpy = ">=1.17"
+packaging = ">=20.0"
+pyyaml = ">=5.1"
+regex = "!=2019.12.17"
+requests = "*"
+safetensors = ">=0.4.3"
+tokenizers = ">=0.22.0,<=0.23.0"
+tqdm = ">=4.27"
+
+[package.extras]
+accelerate = ["accelerate (>=0.26.0)"]
+all = ["Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "codecarbon (>=2.8.1)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "librosa", "mistral-common[opencv] (>=1.6.3)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision"]
+audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+benchmark = ["optimum-benchmark (>=0.3.0)"]
+chat-template = ["jinja2 (>=3.1.0)"]
+codecarbon = ["codecarbon (>=2.8.1)"]
+deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"]
+deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "optuna", "parameterized (>=0.9)", "protobuf", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"]
+dev = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"]
+dev-tensorflow = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "onnxconverter-common", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "tf2onnx", "timeout-decorator", "tokenizers (>=0.22.0,<=0.23.0)", "urllib3 (<2.0.0)"]
+dev-torch = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "kenlm", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"]
+flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"]
+flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+ftfy = ["ftfy"]
+hf-xet = ["hf_xet"]
+hub-kernels = ["kernels (>=0.6.1,<=0.9)"]
+integrations = ["kernels (>=0.6.1,<=0.9)", "optuna", "ray[tune] (>=2.7.0)", "sigopt"]
+ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)"]
+mistral-common = ["mistral-common[opencv] (>=1.6.3)"]
+modelcreation = ["cookiecutter (==1.7.3)"]
+natten = ["natten (>=0.14.6,<0.15.0)"]
+num2words = ["num2words"]
+onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"]
+onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"]
+open-telemetry = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"]
+optuna = ["optuna"]
+quality = ["GitPython (<3.1.19)", "datasets (>=2.15.0)", "libcst", "pandas (<2.3.0)", "rich", "ruff (==0.11.2)", "urllib3 (<2.0.0)"]
+ray = ["ray[tune] (>=2.7.0)"]
+retrieval = ["datasets (>=2.15.0)", "faiss-cpu"]
+ruff = ["ruff (==0.11.2)"]
+sagemaker = ["sagemaker (>=2.31.0)"]
+sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"]
+serving = ["accelerate (>=0.26.0)", "fastapi", "openai (>=1.98.0)", "pydantic (>=2)", "starlette", "torch (>=2.2)", "uvicorn"]
+sigopt = ["sigopt"]
+sklearn = ["scikit-learn"]
+speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
+testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "parameterized (>=0.9)", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"]
+tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"]
+tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"]
+tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+tiktoken = ["blobfile", "tiktoken"]
+timm = ["timm (!=1.0.18,<=1.0.19)"]
+tokenizers = ["tokenizers (>=0.22.0,<=0.23.0)"]
+torch = ["accelerate (>=0.26.0)", "torch (>=2.2)"]
+torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
+torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"]
+torchhub = ["filelock", "huggingface-hub (>=0.34.0,<1.0)", "importlib_metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "tqdm (>=4.27)"]
+video = ["av"]
+vision = ["Pillow (>=10.0.1,<=15.0)"]
+
[[package]]
name = "trio"
version = "0.30.0"
@@ -2044,7 +3231,126 @@ files = [
[package.dependencies]
h11 = ">=0.9.0,<1"
+[[package]]
+name = "yarl"
+version = "1.20.1"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"},
+ {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"},
+ {file = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"},
+ {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"},
+ {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"},
+ {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"},
+ {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"},
+ {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"},
+ {file = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"},
+ {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"},
+ {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"},
+ {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"},
+ {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"},
+ {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"},
+ {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"},
+ {file = "yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"},
+ {file = "yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"},
+ {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"},
+ {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"},
+ {file = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"},
+ {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"},
+ {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"},
+ {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"},
+ {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"},
+ {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"},
+ {file = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"},
+ {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"},
+ {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"},
+ {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"},
+ {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"},
+ {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"},
+ {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"},
+ {file = "yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"},
+ {file = "yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"},
+ {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9"},
+ {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a"},
+ {file = "yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2"},
+ {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee"},
+ {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819"},
+ {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16"},
+ {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6"},
+ {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd"},
+ {file = "yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a"},
+ {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38"},
+ {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef"},
+ {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f"},
+ {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8"},
+ {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a"},
+ {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"},
+ {file = "yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"},
+ {file = "yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"},
+ {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a"},
+ {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3"},
+ {file = "yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7"},
+ {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691"},
+ {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31"},
+ {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28"},
+ {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653"},
+ {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5"},
+ {file = "yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02"},
+ {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53"},
+ {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc"},
+ {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04"},
+ {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4"},
+ {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b"},
+ {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1"},
+ {file = "yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7"},
+ {file = "yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c"},
+ {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d"},
+ {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf"},
+ {file = "yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3"},
+ {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d"},
+ {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c"},
+ {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1"},
+ {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce"},
+ {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3"},
+ {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be"},
+ {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16"},
+ {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513"},
+ {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f"},
+ {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390"},
+ {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458"},
+ {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e"},
+ {file = "yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d"},
+ {file = "yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f"},
+ {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"},
+ {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"},
+ {file = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"},
+ {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"},
+ {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"},
+ {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"},
+ {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"},
+ {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"},
+ {file = "yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"},
+ {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"},
+ {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"},
+ {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"},
+ {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"},
+ {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"},
+ {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"},
+ {file = "yarl-1.20.1-cp39-cp39-win32.whl", hash = "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"},
+ {file = "yarl-1.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"},
+ {file = "yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"},
+ {file = "yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+propcache = ">=0.2.1"
+
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.14"
-content-hash = "4977aa61a86e0ad28813ae35592d3911f058b2893f46b87677d7a77b61d9d06f"
+content-hash = "6e10697924e89b5c0f7c3f6ecd79fb64ac412ac2240f75565d6ea5feb3a89a20"
diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml
index 93b6c1da..672bf645 100644
--- a/apps/pre-processing-service/pyproject.toml
+++ b/apps/pre-processing-service/pyproject.toml
@@ -5,15 +5,8 @@ description = ""
authors = [
{name = "skip"}
]
-readme = "README.md"
requires-python = ">=3.11,<3.14"
-
-#[[tool.poetry.source]]
-#name = "pytorch"
-#url = "https://download.pytorch.org/whl/cpu"
-#priority = "explicit"
-
[tool.poetry.dependencies]
python = ">=3.11,<3.14"
fastapi = ">=0.116.1,<0.117.0"
@@ -26,9 +19,8 @@ gunicorn = ">=23.0.0,<24.0.0"
requests = ">=2.32.5,<3.0.0"
bs4 = ">=0.0.2,<0.0.3"
selenium = ">=4.35.0,<5.0.0"
-#transformers = ">=4.56.0,<5.0.0"
+transformers = ">=4.56.0,<5.0.0"
numpy = ">=2.3.2,<3.0.0"
-#torch = ">=2.8.0,<3.0.0"
scikit-learn = ">=1.7.1,<2.0.0"
python-dotenv = ">=1.1.1,<2.0.0"
mecab-python3 = ">=1.0.10,<2.0.0"
@@ -41,6 +33,9 @@ google-auth-oauthlib = "^1.2.2"
google-api-python-client = "^2.181.0"
poetry-core=">=2.1.3,<3.0.0"
dbutils=">=3.1.2,<4.0.0"
+onnxruntime = "^1.22.1"
+openai = "^1.107.3"
+aiohttp = "^3.12.15"
[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle
index 624067f6..096e6d65 100644
--- a/apps/user-service/build.gradle
+++ b/apps/user-service/build.gradle
@@ -44,19 +44,24 @@ dependencies {
// MyBatis
implementation 'org.mybatis.spring.boot:mybatis-spring-boot-starter:3.0.5'
- // batch
- implementation 'org.springframework.boot:spring-boot-starter-batch'
+ // Scheduler
+ implementation 'org.springframework.boot:spring-boot-starter-quartz'
- // Log4j2 - unify all modules on 2.22.1
implementation 'org.springframework.boot:spring-boot-starter-log4j2'
- implementation 'org.apache.logging.log4j:log4j-core:2.22.1'
- implementation 'org.apache.logging.log4j:log4j-api:2.22.1'
- implementation 'org.apache.logging.log4j:log4j-slf4j2-impl:2.22.1'
- implementation 'org.apache.logging.log4j:log4j-jul:2.22.1'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml'
+ implementation 'org.apache.logging.log4j:log4j-layout-template-json'
+ implementation 'pl.tkowalcz.tjahzi:log4j2-appender-nodep:0.9.17'
+ implementation 'org.apache.httpcomponents:httpclient:4.5.14'
+ implementation 'org.apache.httpcomponents:httpcore:4.4.16'
// Asynchronous logging
- implementation 'com.lmax:disruptor:3.4.4'
+// implementation 'com.lmax:disruptor:3.4.4'
+// implementation 'org.apache.commons:commons-dbcp2'
+// implementation 'org.apache.commons:commons-pool2'
+
+ implementation "io.micrometer:micrometer-tracing-bridge-brave"
+ implementation "io.micrometer:micrometer-tracing"
+ implementation "org.springframework.boot:spring-boot-starter-actuator"
// Lombok
compileOnly 'org.projectlombok:lombok:1.18.30'
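
This hunk swaps spring-boot-starter-batch for spring-boot-starter-quartz, and the batch Job/Step beans are deleted below. A minimal sketch of the Quartz wiring that would replace them, assuming standard spring-boot-starter-quartz auto-configuration; BlogContentJob, the bean names, and the 06:00 schedule are hypothetical, not taken from this repo:

// Sketch only: Quartz job/trigger beans under spring-boot-starter-quartz.
import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.QuartzJobBean;

@Configuration
class QuartzJobConfig {

    @Bean
    JobDetail blogContentJobDetail() {
        // storeDurably() keeps the JobDetail registered even without a trigger
        return JobBuilder.newJob(BlogContentJob.class)
                .withIdentity("blogContentJob")
                .storeDurably()
                .build();
    }

    @Bean
    Trigger blogContentTrigger(JobDetail blogContentJobDetail) {
        // The daily 06:00 firing time is an assumption for illustration
        return TriggerBuilder.newTrigger()
                .forJob(blogContentJobDetail)
                .withIdentity("blogContentTrigger")
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(6, 0))
                .build();
    }
}

// Extending QuartzJobBean lets Spring inject dependencies into the job instance
class BlogContentJob extends QuartzJobBean {
    @Override
    protected void executeInternal(JobExecutionContext context) {
        // TODO: invoke keyword extraction / content generation services
    }
}
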
diff --git a/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java
index 68da9f2a..29e975ba 100644
--- a/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java
+++ b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java
@@ -1,13 +1,9 @@
package site.icebang;
import org.mybatis.spring.annotation.MapperScan;
-import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.scheduling.annotation.EnableScheduling;
-@EnableScheduling
-@EnableBatchProcessing
@SpringBootApplication
@MapperScan("site.icebang.**.mapper")
public class UserServiceApplication {
diff --git a/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java b/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java
deleted file mode 100644
index 5e85fe9f..00000000
--- a/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package site.icebang.batch.job;
-
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.job.builder.JobBuilder;
-import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.step.builder.StepBuilder;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.transaction.PlatformTransactionManager;
-
-import lombok.RequiredArgsConstructor;
-
-import site.icebang.batch.tasklet.ContentGenerationTasklet;
-import site.icebang.batch.tasklet.KeywordExtractionTasklet;
-
-@Configuration
-@RequiredArgsConstructor
-public class BlogContentJobConfig {
-
- // Change 1: Inject only the actual Tasklets as fields, instead of a Factory.
- private final KeywordExtractionTasklet keywordExtractionTasklet;
- private final ContentGenerationTasklet contentGenerationTasklet;
-
- @Bean
- public Job blogContentJob(
- JobRepository jobRepository, Step keywordExtractionStep, Step contentGenerationStep) {
- return new JobBuilder("blogContentJob", jobRepository) // Change 2: Create the JobBuilder directly.
- .start(keywordExtractionStep)
- .next(contentGenerationStep)
- .build();
- }
-
- @Bean
- public Step keywordExtractionStep(
- JobRepository jobRepository, PlatformTransactionManager transactionManager) {
- return new StepBuilder("keywordExtractionStep", jobRepository) // Change 3: Create the StepBuilder directly.
- .tasklet(
- keywordExtractionTasklet,
- transactionManager) // Change 4: Pass the transactionManager along with the tasklet.
- .build();
- }
-
- @Bean
- public Step contentGenerationStep(
- JobRepository jobRepository, PlatformTransactionManager transactionManager) {
- return new StepBuilder("contentGenerationStep", jobRepository)
- .tasklet(contentGenerationTasklet, transactionManager)
- .build();
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java
deleted file mode 100644
index a6ef4505..00000000
--- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package site.icebang.batch.tasklet;
-
-import java.util.List;
-
-import org.springframework.batch.core.StepContribution;
-import org.springframework.batch.core.scope.context.ChunkContext;
-import org.springframework.batch.core.step.tasklet.Tasklet;
-import org.springframework.batch.item.ExecutionContext;
-import org.springframework.batch.repeat.RepeatStatus;
-import org.springframework.stereotype.Component;
-
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-@Component
-@RequiredArgsConstructor
-public class ContentGenerationTasklet implements Tasklet {
-
- // private final ContentService contentService; // service holding the business logic
- // private final FastApiClient fastApiClient; // client for FastAPI communication
-
- @Override
- public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext)
- throws Exception {
- log.info(">>>> [Step 2] ContentGenerationTasklet executed.");
-
- // --- Key point: fetch the previous Step's output from the JobExecutionContext ---
- ExecutionContext jobExecutionContext =
- chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext();
-
- // Look up the data stored by KeywordExtractionTasklet under the "extractedKeywordIds" key
- List<Long> keywordIds = (List<Long>) jobExecutionContext.get("extractedKeywordIds");
-
- if (keywordIds == null || keywordIds.isEmpty()) {
- log.warn(">>>> No keyword IDs found from previous step. Skipping content generation.");
- return RepeatStatus.FINISHED;
- }
-
- log.info(">>>> Received Keyword IDs for content generation: {}", keywordIds);
-
- // TODO: 1. Query the DB for detailed info using the received keyword ID list
- // TODO: 2. For each keyword/product, call the external AI service (FastAPI/LangChain) to request content generation
- // TODO: 3. Save the generated content to the DB
-
- log.info(">>>> [Step 2] ContentGenerationTasklet finished.");
- return RepeatStatus.FINISHED;
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java
deleted file mode 100644
index ebc27117..00000000
--- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package site.icebang.batch.tasklet;
-
-import java.util.List;
-
-import org.springframework.batch.core.StepContribution;
-import org.springframework.batch.core.scope.context.ChunkContext;
-import org.springframework.batch.core.step.tasklet.Tasklet;
-import org.springframework.batch.item.ExecutionContext;
-import org.springframework.batch.repeat.RepeatStatus;
-import org.springframework.stereotype.Component;
-
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-@Component
-@RequiredArgsConstructor
-public class KeywordExtractionTasklet implements Tasklet {
-
- // private final TrendKeywordService trendKeywordService; // service holding the business logic
-
- @Override
- public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext)
- throws Exception {
- log.info(">>>> [Step 1] KeywordExtractionTasklet executed.");
-
- // TODO: 1. Query category info from the DB
- // TODO: 2. Extract trend keywords via an external API or internal logic
- // TODO: 3. Save the extracted keywords to the DB
-
- // --- Key point: create the data to hand off to the next Step ---
- // Example: assume we fetched the list of newly created keyword IDs
- List<Long> extractedKeywordIds = List.of(1L, 2L, 3L); // in reality, the ID list returned after saving to the DB
- log.info(">>>> Extracted Keyword IDs: {}", extractedKeywordIds);
-
- // --- Key point: store the output in the JobExecutionContext ---
- // Fetch the context shared across the entire JobExecution.
- ExecutionContext jobExecutionContext =
- chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext();
-
- // Store the data under the key "extractedKeywordIds"
- jobExecutionContext.put("extractedKeywordIds", extractedKeywordIds);
-
- log.info(">>>> [Step 1] KeywordExtractionTasklet finished.");
- return RepeatStatus.FINISHED;
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java b/apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java
new file mode 100644
index 00000000..5f2f0d30
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java
@@ -0,0 +1,25 @@
+package site.icebang.common.dto;
+
+import lombok.Data;
+
+@Data
+public class PageParams {
+ private int current = 1;
+ private int pageSize = 10;
+ private String search;
+ private String[] sorters;
+ private String[] filters;
+
+ // Computed offset
+ public int getOffset() {
+ return (current - 1) * pageSize;
+ }
+
+ public boolean hasSearch() {
+ return search != null && !search.trim().isEmpty();
+ }
+
+ public boolean hasSorters() {
+ return sorters != null && sorters.length > 0;
+ }
+}
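
A quick worked example of the offset math in PageParams; the values are illustrative, and the setters come from Lombok's @Data:

PageParams params = new PageParams();
params.setCurrent(3);    // third page
params.setPageSize(10);  // ten rows per page
int offset = params.getOffset(); // (3 - 1) * 10 = 20 -> skip the first 20 rows
// A mapper query would then apply something like: LIMIT #{pageSize} OFFSET #{offset}
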
diff --git a/apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java b/apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java
new file mode 100644
index 00000000..4a2a8bfa
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java
@@ -0,0 +1,77 @@
+package site.icebang.common.dto;
+
+import java.util.List;
+import java.util.function.Supplier;
+
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@Data
+@NoArgsConstructor
+public class PageResult {
+ private List data;
+ private int total;
+ private int current;
+ private int pageSize;
+ private int totalPages;
+ private boolean hasNext;
+ private boolean hasPrevious;
+
+ public PageResult(List data, int total, int current, int pageSize) {
+ this.data = data;
+ this.total = total;
+ this.current = current;
+ this.pageSize = pageSize;
+ calculatePagination();
+ }
+
+ // 페이징 계산 로직 분리
+ private void calculatePagination() {
+ this.totalPages = total > 0 ? (int) Math.ceil((double) total / pageSize) : 0;
+ this.hasNext = current < totalPages;
+ this.hasPrevious = current > 1;
+ }
+
+ // 기존 of 메서드
+ public static PageResult of(List data, int total, int current, int pageSize) {
+ return new PageResult<>(data, total, current, pageSize);
+ }
+
+ // PageParams를 받는 of 메서드
+ public static PageResult of(List data, int total, PageParams pageParams) {
+ return new PageResult<>(data, total, pageParams.getCurrent(), pageParams.getPageSize());
+ }
+
+ // 함수형 인터페이스를 활용한 from 메서드 (트랜잭션 내에서 실행)
+ public static PageResult from(
+ PageParams pageParams, Supplier> dataSupplier, Supplier countSupplier) {
+ List data = dataSupplier.get();
+ int total = countSupplier.get();
+ return new PageResult<>(data, total, pageParams.getCurrent(), pageParams.getPageSize());
+ }
+
+ // create an empty page result
+ public static <T> PageResult<T> empty(PageParams pageParams) {
+ return new PageResult<>(List.of(), 0, pageParams.getCurrent(), pageParams.getPageSize());
+ }
+
+ // create an empty page result (defaults)
+ public static <T> PageResult<T> empty() {
+ return new PageResult<>(List.of(), 0, 1, 10);
+ }
+
+ // whether any data is present
+ public boolean hasData() {
+ return data != null && !data.isEmpty();
+ }
+
+ // whether this is the first page
+ public boolean isFirstPage() {
+ return current == 1;
+ }
+
+ // whether this is the last page
+ public boolean isLastPage() {
+ return current == totalPages;
+ }
+}
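
For orientation, PageResult.from is designed so that both suppliers run lazily inside whatever transaction the caller opened, meaning the list query and the count query see a consistent snapshot. A minimal usage sketch (the FooMapper/FooDto names are hypothetical, not part of this change):

    @Service
    @RequiredArgsConstructor
    public class FooQueryService {
        private final FooMapper fooMapper; // hypothetical MyBatis mapper

        @Transactional(readOnly = true)
        public PageResult<FooDto> page(PageParams params) {
            // both lambdas execute inside this read-only transaction
            return PageResult.from(
                params,
                () -> fooMapper.selectFooList(params),
                () -> fooMapper.selectFooCount(params));
        }
    }

WorkflowService further down in this change follows exactly this pattern.
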
diff --git a/apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java b/apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java
new file mode 100644
index 00000000..e673ab86
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java
@@ -0,0 +1,25 @@
+package site.icebang.common.exception;
+
+public class DuplicateDataException extends RuntimeException {
+
+ public DuplicateDataException() {
+ super();
+ }
+
+ public DuplicateDataException(String message) {
+ super(message);
+ }
+
+ public DuplicateDataException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public DuplicateDataException(Throwable cause) {
+ super(cause);
+ }
+
+ protected DuplicateDataException(
+ String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/common/service/PageableService.java b/apps/user-service/src/main/java/site/icebang/common/service/PageableService.java
new file mode 100644
index 00000000..25d41d29
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/common/service/PageableService.java
@@ -0,0 +1,8 @@
+package site.icebang.common.service;
+
+import site.icebang.common.dto.PageParams;
+import site.icebang.common.dto.PageResult;
+
+public interface PageableService<T> {
+ PageResult<T> getPagedResult(PageParams pageParams);
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java b/apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java
new file mode 100644
index 00000000..c3e225b7
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java
@@ -0,0 +1,34 @@
+package site.icebang.domain;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import lombok.extern.slf4j.Slf4j;
+
+import site.icebang.common.dto.ApiResponse;
+
+@RestController
+@RequestMapping("/v0/check-execution-log-insert")
+@Slf4j
+public class WorkflowLogInsertExampleController {
+ private static final Logger workflowLogger = LoggerFactory.getLogger("WORKFLOW_HISTORY");
+
+ @GetMapping("")
+ public ApiResponse<String> test() {
+ log.info("@@");
+ // MDC.put("traceId", UUID.randomUUID().toString());
+ MDC.put("sourceId", "o1");
+ MDC.put("executionType", "WORKFLOW");
+ // MDC.put("sourceId", "test-controller");
+
+ // this log entry is persisted to the DB
+ workflowLogger.info("Workflow log written via SLF4J");
+
+ MDC.clear();
+ return ApiResponse.success("hi");
+ }
+}
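
This endpoint only produces a DB row because the JDBC appender configured in log4j2-develop.yml (further down in this change) maps %X{executionType}, %X{sourceId}, and %X{traceId} from the MDC into columns of the execution_log table. Any code that logs through the WORKFLOW_HISTORY logger should therefore populate those keys first and clear them afterwards; a minimal sketch of the contract:

    MDC.put("executionType", "WORKFLOW");
    MDC.put("sourceId", "o1"); // identifier of whatever is being logged
    try {
        LoggerFactory.getLogger("WORKFLOW_HISTORY").info("persisted to execution_log");
    } finally {
        MDC.clear(); // avoid leaking MDC state across pooled request threads
    }
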
diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java
index d0a98142..2303cf74 100644
--- a/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java
+++ b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java
@@ -61,4 +61,18 @@ public ApiResponse checkSession(@AuthenticationPrincipal AuthCredential
public ApiResponse<AuthCredential> getPermissions(@AuthenticationPrincipal AuthCredential user) {
return ApiResponse.success(user);
}
+
+ @PostMapping("/logout")
+ public ApiResponse<Void> logout(HttpServletRequest request) {
+ // clear the SecurityContext
+ SecurityContextHolder.clearContext();
+
+ // invalidate the session
+ HttpSession session = request.getSession(false);
+ if (session != null) {
+ session.invalidate();
+ }
+
+ return ApiResponse.success(null);
+ }
}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java
index 091861b2..25a5bd42 100644
--- a/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java
+++ b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java
@@ -6,6 +6,7 @@
import lombok.RequiredArgsConstructor;
+import site.icebang.common.exception.DuplicateDataException;
import site.icebang.common.utils.RandomPasswordGenerator;
import site.icebang.domain.auth.dto.RegisterDto;
import site.icebang.domain.auth.mapper.AuthMapper;
@@ -23,7 +24,7 @@ public class AuthService {
public void registerUser(RegisterDto registerDto) {
if (authMapper.existsByEmail(registerDto.getEmail())) {
- throw new IllegalArgumentException("This email is already registered.");
+ throw new DuplicateDataException("This email is already registered.");
}
String randomPassword = passwordGenerator.generate();
String hashedPassword = passwordEncoder.encode(randomPassword);
diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java
new file mode 100644
index 00000000..d5ce7e8f
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java
@@ -0,0 +1,12 @@
+package site.icebang.domain.execution.mapper;
+
+import org.apache.ibatis.annotations.Mapper;
+
+import site.icebang.domain.execution.model.JobRun;
+
+@Mapper
+public interface JobRunMapper {
+ void insert(JobRun jobRun);
+
+ void update(JobRun jobRun);
+}
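
WorkflowExecutionService (below) reads jobRun.getId() immediately after calling insert, so the mapper XML backing these interfaces has to write the generated key back into the model. The XML files are not part of this diff; a minimal sketch of what the insert statement would need, assuming a job_run table with snake_case columns:

    <insert id="insert" useGeneratedKeys="true" keyProperty="id">
      INSERT INTO job_run (workflow_run_id, job_id, status, started_at, created_at)
      VALUES (#{workflowRunId}, #{jobId}, #{status}, #{startedAt}, #{createdAt})
    </insert>

The same applies to WorkflowRunMapper and TaskRunMapper below.
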
diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java
new file mode 100644
index 00000000..646a7c91
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java
@@ -0,0 +1,12 @@
+package site.icebang.domain.execution.mapper;
+
+import org.apache.ibatis.annotations.Mapper;
+
+import site.icebang.domain.execution.model.TaskRun;
+
+@Mapper
+public interface TaskRunMapper {
+ void insert(TaskRun taskRun);
+
+ void update(TaskRun taskRun);
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java
new file mode 100644
index 00000000..776ec4b0
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java
@@ -0,0 +1,12 @@
+package site.icebang.domain.execution.mapper;
+
+import org.apache.ibatis.annotations.Mapper;
+
+import site.icebang.domain.execution.model.WorkflowRun;
+
+@Mapper
+public interface WorkflowRunMapper {
+ void insert(WorkflowRun workflowRun);
+
+ void update(WorkflowRun workflowRun);
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java b/apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java
new file mode 100644
index 00000000..f5310f12
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java
@@ -0,0 +1,38 @@
+package site.icebang.domain.execution.model;
+
+import java.time.LocalDateTime;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+@Getter
+@NoArgsConstructor
+public class JobRun {
+
+ private Long id;
+ private Long workflowRunId;
+ private Long jobId;
+ private String status; // PENDING, RUNNING, SUCCESS, FAILED
+ private LocalDateTime startedAt;
+ private LocalDateTime finishedAt;
+ private LocalDateTime createdAt;
+
+ private JobRun(Long workflowRunId, Long jobId) {
+ this.workflowRunId = workflowRunId;
+ this.jobId = jobId;
+ this.status = "RUNNING";
+ this.startedAt = LocalDateTime.now();
+ this.createdAt = this.startedAt;
+ }
+
+ /** Static factory method to start a Job run */
+ public static JobRun start(Long workflowRunId, Long jobId) {
+ return new JobRun(workflowRunId, jobId);
+ }
+
+ /** Mark the Job run as finished */
+ public void finish(String status) {
+ this.status = status;
+ this.finishedAt = LocalDateTime.now();
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java b/apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java
new file mode 100644
index 00000000..f1ae2239
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java
@@ -0,0 +1,43 @@
+package site.icebang.domain.execution.model;
+
+import java.time.LocalDateTime;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+@Getter
+@NoArgsConstructor
+public class TaskRun {
+
+ private Long id;
+ private Long jobRunId;
+ private Long taskId;
+ private String status; // PENDING, RUNNING, SUCCESS, FAILED
+ private String resultMessage; // execution result message
+ private LocalDateTime startedAt;
+ private LocalDateTime finishedAt;
+ private LocalDateTime createdAt;
+
+ // object creation is managed through the constructor and static factory method
+ private TaskRun(Long jobRunId, Long taskId) {
+ this.jobRunId = jobRunId;
+ this.taskId = taskId;
+ this.status = "PENDING";
+ this.createdAt = LocalDateTime.now();
+ }
+
+ /** Static factory method to start a Task run */
+ public static TaskRun start(Long jobRunId, Long taskId) {
+ TaskRun taskRun = new TaskRun(jobRunId, taskId);
+ taskRun.status = "RUNNING";
+ taskRun.startedAt = LocalDateTime.now();
+ return taskRun;
+ }
+
+ /** Mark the Task run as finished */
+ public void finish(String status, String resultMessage) {
+ this.status = status;
+ this.resultMessage = resultMessage;
+ this.finishedAt = LocalDateTime.now();
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java b/apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java
new file mode 100644
index 00000000..6bd5dbc9
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java
@@ -0,0 +1,39 @@
+package site.icebang.domain.execution.model;
+
+import java.time.LocalDateTime;
+import java.util.UUID;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+@Getter
+@NoArgsConstructor
+public class WorkflowRun {
+
+ private Long id;
+ private Long workflowId;
+ private String traceId; // ID for distributed tracing
+ private String status; // PENDING, RUNNING, SUCCESS, FAILED
+ private LocalDateTime startedAt;
+ private LocalDateTime finishedAt;
+ private LocalDateTime createdAt;
+
+ private WorkflowRun(Long workflowId) {
+ this.workflowId = workflowId;
+ this.traceId = UUID.randomUUID().toString(); // generate a unique trace ID
+ this.status = "RUNNING";
+ this.startedAt = LocalDateTime.now();
+ this.createdAt = this.startedAt;
+ }
+
+ /** Static factory method to start a workflow run */
+ public static WorkflowRun start(Long workflowId) {
+ return new WorkflowRun(workflowId);
+ }
+
+ /** Mark the workflow run as finished */
+ public void finish(String status) {
+ this.status = status;
+ this.finishedAt = LocalDateTime.now();
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java
index c757fc36..12567a60 100644
--- a/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java
+++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java
@@ -8,5 +8,5 @@
@Mapper
public interface ScheduleMapper {
- List<Schedule> findAllByIsActive(boolean isActive);
+ List<Schedule> findAllActive();
}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java
index 65c48366..c2218bd0 100644
--- a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java
+++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java
@@ -1,14 +1,31 @@
package site.icebang.domain.schedule.model;
+import java.time.LocalDateTime;
+
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
import lombok.Getter;
+import lombok.NoArgsConstructor;
import lombok.Setter;
@Getter
-@Setter
+@Setter // used for state changes in the service layer and for MyBatis mapping
+@Builder
+@NoArgsConstructor(access = AccessLevel.PROTECTED)
+@AllArgsConstructor
public class Schedule {
- private Long scheduleId;
+
+ private Long id;
private Long workflowId;
private String cronExpression;
+ private String parameters; // JSON format
private boolean isActive;
- // ... other columns as needed
+ private String lastRunStatus;
+ private LocalDateTime lastRunAt;
+ private LocalDateTime createdAt;
+ private Long createdBy;
+ private LocalDateTime updatedAt;
+ private Long updatedBy;
+ private String scheduleText;
}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java
deleted file mode 100644
index 0dfb8b33..00000000
--- a/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package site.icebang.domain.schedule.runner;
-
-import java.util.List;
-
-import org.springframework.boot.ApplicationArguments;
-import org.springframework.boot.ApplicationRunner;
-import org.springframework.stereotype.Component;
-
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
-import site.icebang.domain.schedule.mapper.ScheduleMapper;
-import site.icebang.domain.schedule.model.Schedule;
-import site.icebang.domain.schedule.service.DynamicSchedulerService;
-
-@Slf4j
-@Component
-@RequiredArgsConstructor
-public class SchedulerInitializer implements ApplicationRunner {
-
- private final ScheduleMapper scheduleMapper;
- private final DynamicSchedulerService dynamicSchedulerService;
-
- @Override
- public void run(ApplicationArguments args) {
- log.info(">>>> Initializing schedules from database...");
- List<Schedule> activeSchedules = scheduleMapper.findAllByIsActive(true);
- activeSchedules.forEach(dynamicSchedulerService::register);
- log.info(">>>> {} active schedules have been registered.", activeSchedules.size());
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java
deleted file mode 100644
index 372e0e1d..00000000
--- a/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package site.icebang.domain.schedule.service;
-
-import java.time.LocalDateTime;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ScheduledFuture;
-
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.JobParametersBuilder;
-import org.springframework.batch.core.launch.JobLauncher;
-import org.springframework.context.ApplicationContext;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-import org.springframework.stereotype.Service;
-
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
-import site.icebang.domain.schedule.model.Schedule;
-
-@Slf4j
-@Service
-@RequiredArgsConstructor
-public class DynamicSchedulerService {
-
- private final TaskScheduler taskScheduler;
- private final JobLauncher jobLauncher;
- private final ApplicationContext applicationContext;
- private final Map<Long, ScheduledFuture<?>> scheduledTasks = new ConcurrentHashMap<>();
-
- public void register(Schedule schedule) {
- // TODO: look up the name of the Job to run from the DB, based on schedule.getWorkflowId()
- String jobName = "blogContentJob"; // example
- Job jobToRun = applicationContext.getBean(jobName, Job.class);
-
- Runnable runnable =
- () -> {
- try {
- JobParametersBuilder paramsBuilder = new JobParametersBuilder();
- paramsBuilder.addString("runAt", LocalDateTime.now().toString());
- paramsBuilder.addLong("scheduleId", schedule.getScheduleId());
- jobLauncher.run(jobToRun, paramsBuilder.toJobParameters());
- } catch (Exception e) {
- log.error(
- "Failed to run scheduled job for scheduleId: {}", schedule.getScheduleId(), e);
- }
- };
-
- CronTrigger trigger = new CronTrigger(schedule.getCronExpression());
- ScheduledFuture<?> future = taskScheduler.schedule(runnable, trigger);
- scheduledTasks.put(schedule.getScheduleId(), future);
- log.info(
- ">>>> Schedule registered: id={}, cron={}",
- schedule.getScheduleId(),
- schedule.getCronExpression());
- }
-
- public void remove(Long scheduleId) {
- ScheduledFuture<?> future = scheduledTasks.get(scheduleId);
- if (future != null) {
- future.cancel(true);
- scheduledTasks.remove(scheduleId);
- log.info(">>>> Schedule removed: id={}", scheduleId);
- }
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java
new file mode 100644
index 00000000..3a5f1aef
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java
@@ -0,0 +1,43 @@
+package site.icebang.domain.schedule.service;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.quartz.*;
+import org.springframework.stereotype.Service;
+import site.icebang.domain.schedule.model.Schedule;
+import site.icebang.domain.workflow.scheduler.WorkflowTriggerJob;
+
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class QuartzScheduleService {
+
+ private final Scheduler scheduler;
+
+ public void addOrUpdateSchedule(Schedule schedule) {
+ JobKey jobKey = JobKey.jobKey("workflow-" + schedule.getWorkflowId());
+ JobDetail jobDetail = JobBuilder.newJob(WorkflowTriggerJob.class)
+ .withIdentity(jobKey)
+ .withDescription("Workflow " + schedule.getWorkflowId() + " Trigger Job")
+ .usingJobData("workflowId", schedule.getWorkflowId())
+ .storeDurably()
+ .build();
+
+ TriggerKey triggerKey = TriggerKey.triggerKey("trigger-for-workflow-" + schedule.getWorkflowId());
+ Trigger trigger = TriggerBuilder.newTrigger()
+ .forJob(jobDetail)
+ .withIdentity(triggerKey)
+ .withSchedule(CronScheduleBuilder.cronSchedule(schedule.getCronExpression()))
+ .build();
+ try {
+ scheduler.scheduleJob(jobDetail, trigger);
+ log.info("Quartz 스케줄 등록/업데이트 완료: Workflow ID {}", schedule.getWorkflowId());
+ } catch (SchedulerException e) {
+ log.error("Quartz 스케줄 등록 실패", e);
+ }
+ }
+
+ public void deleteSchedule(Long workflowId) {
+ // ... (deletion logic)
+ }
+}
\ No newline at end of file
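
One caveat worth flagging: Scheduler.scheduleJob(JobDetail, Trigger) throws ObjectAlreadyExistsException when the job key is already registered, so despite its name, addOrUpdateSchedule will log a failure on re-registration (for example when QuartzSchedulerInitializer runs against a persistent JobStore that still holds the jobs). A sketch of an idempotent variant using Quartz's replace-aware overload:

    // The Set-based overload replaces any existing job and triggers when the
    // last argument is true, which makes re-registration at startup safe.
    scheduler.scheduleJob(jobDetail, java.util.Set.of(trigger), true);
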
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java
new file mode 100644
index 00000000..348058ee
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java
@@ -0,0 +1,37 @@
+package site.icebang.domain.workflow.controller;
+
+import java.util.concurrent.CompletableFuture;
+
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+
+import lombok.RequiredArgsConstructor;
+
+import site.icebang.common.dto.ApiResponse;
+import site.icebang.common.dto.PageParams;
+import site.icebang.common.dto.PageResult;
+import site.icebang.domain.workflow.dto.WorkflowCardDto;
+import site.icebang.domain.workflow.service.WorkflowExecutionService;
+import site.icebang.domain.workflow.service.WorkflowService;
+
+@RestController
+@RequestMapping("/v0/workflows")
+@RequiredArgsConstructor
+public class WorkflowController {
+ private final WorkflowService workflowService;
+ private final WorkflowExecutionService workflowExecutionService;
+
+ @GetMapping("")
+ public ApiResponse<PageResult<WorkflowCardDto>> getWorkflowList(
+ @ModelAttribute PageParams pageParams) {
+ PageResult<WorkflowCardDto> result = workflowService.getPagedResult(pageParams);
+ return ApiResponse.success(result);
+ }
+
+ @PostMapping("/{workflowId}/run")
+ public ResponseEntity<Void> runWorkflow(@PathVariable Long workflowId) {
+ // run asynchronously so the HTTP request thread is not blocked
+ CompletableFuture.runAsync(() -> workflowExecutionService.executeWorkflow(workflowId));
+ return ResponseEntity.accepted().build();
+ }
+}
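
Note that CompletableFuture.runAsync without an explicit executor runs on the shared ForkJoinPool.commonPool(), which is sized for CPU-bound work; long HTTP-and-DB-bound workflow runs could starve it. If that becomes a problem, passing a dedicated executor is a small change (sketch; the workflowExecutor bean is hypothetical):

    CompletableFuture.runAsync(
        () -> workflowExecutionService.executeWorkflow(workflowId),
        workflowExecutor); // e.g. an injected, bounded ThreadPoolTaskExecutor
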
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java
new file mode 100644
index 00000000..a39ce0c3
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java
@@ -0,0 +1,16 @@
+package site.icebang.domain.workflow.dto;
+
+import java.math.BigInteger;
+import java.time.LocalDateTime;
+
+import lombok.Data;
+
+@Data
+public class WorkflowCardDto {
+ private BigInteger id;
+ private String name;
+ private String description;
+ private boolean isEnabled;
+ private String createdBy;
+ private LocalDateTime createdAt;
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java
new file mode 100644
index 00000000..a82739f4
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java
@@ -0,0 +1,15 @@
+package site.icebang.domain.workflow.mapper;
+
+import java.util.List;
+
+import org.apache.ibatis.annotations.Mapper;
+
+import site.icebang.domain.workflow.model.Job;
+import site.icebang.domain.workflow.model.Task;
+
+@Mapper
+public interface JobMapper {
+ List<Job> findJobsByWorkflowId(Long workflowId);
+
+ List<Task> findTasksByJobId(Long jobId);
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java
new file mode 100644
index 00000000..0edb7812
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java
@@ -0,0 +1,12 @@
+package site.icebang.domain.workflow.mapper;
+
+import java.util.Optional;
+
+import org.apache.ibatis.annotations.Mapper;
+
+import site.icebang.domain.workflow.model.Task;
+
+@Mapper
+public interface TaskMapper {
+ Optional<Task> findById(Long id);
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java
new file mode 100644
index 00000000..00afbebc
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java
@@ -0,0 +1,15 @@
+package site.icebang.domain.workflow.mapper;
+
+import java.math.BigInteger;
+import java.util.*;
+
+import org.apache.ibatis.annotations.Mapper;
+
+import site.icebang.common.dto.PageParams;
+import site.icebang.domain.workflow.dto.WorkflowCardDto;
+
+@Mapper
+public interface WorkflowMapper {
+ List<WorkflowCardDto> selectWorkflowList(PageParams pageParams);
+
+ int selectWorkflowCount(PageParams pageParams);
+
+ WorkflowCardDto selectWorkflowById(BigInteger id);
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java
new file mode 100644
index 00000000..0a3604b5
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java
@@ -0,0 +1,22 @@
+package site.icebang.domain.workflow.model;
+
+import java.time.LocalDateTime;
+
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+@Getter
+@NoArgsConstructor(access = AccessLevel.PROTECTED)
+@AllArgsConstructor
+public class Job {
+ private Long id;
+ private String name;
+ private String description;
+ private boolean isEnabled;
+ private LocalDateTime createdAt;
+ private Long createdBy;
+ private LocalDateTime updatedAt;
+ private Long updatedBy;
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java
new file mode 100644
index 00000000..09589cc1
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java
@@ -0,0 +1,20 @@
+package site.icebang.domain.workflow.model;
+
+import com.fasterxml.jackson.databind.JsonNode;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+@Getter
+@NoArgsConstructor // needed so MyBatis can instantiate the object
+public class Task {
+
+ private Long id;
+ private String name;
+
+ /** Task type (e.g. "HTTP", "SPRING_BATCH"); the TaskRunner is chosen based on this value. */
+ private String type;
+
+ /** Parameters the task needs to run (JSON), e.g. {"url": "http://...", "method": "POST", "body": {...}} */
+ private JsonNode parameters;
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java
new file mode 100644
index 00000000..3ea80388
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java
@@ -0,0 +1,28 @@
+package site.icebang.domain.workflow.model;
+
+import java.time.LocalDateTime;
+
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+@Getter
+@Builder
+@NoArgsConstructor(access = AccessLevel.PROTECTED)
+@AllArgsConstructor
+public class Workflow {
+
+ private Long id;
+ private String name;
+ private String description;
+ private boolean isEnabled;
+ private LocalDateTime createdAt;
+ private Long createdBy;
+ private LocalDateTime updatedAt;
+ private Long updatedBy;
+
+ /** Per-workflow default configuration (JSON) */
+ private String defaultConfig;
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java
new file mode 100644
index 00000000..9f497b97
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java
@@ -0,0 +1,49 @@
+package site.icebang.domain.workflow.runner;
+
+import org.springframework.http.*;
+import org.springframework.stereotype.Component;
+import org.springframework.web.client.RestClientException;
+import org.springframework.web.client.RestTemplate;
+
+import com.fasterxml.jackson.databind.JsonNode;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+import site.icebang.domain.execution.model.TaskRun;
+import site.icebang.domain.workflow.model.Task;
+
+@Slf4j
+@Component("httpTaskRunner")
+@RequiredArgsConstructor
+public class HttpTaskRunner implements TaskRunner {
+ private final RestTemplate restTemplate;
+
+ @Override
+ public TaskExecutionResult execute(Task task, TaskRun taskRun) {
+ JsonNode params = task.getParameters();
+ String url = params.get("url").asText();
+ String method = params.get("method").asText();
+ JsonNode body = params.get("body");
+
+ try {
+ HttpHeaders headers = new HttpHeaders();
+ headers.setContentType(MediaType.APPLICATION_JSON);
+ // "body" may be absent (e.g. for GET requests), so guard against an NPE
+ HttpEntity<String> requestEntity =
+ new HttpEntity<>(body != null ? body.toString() : null, headers);
+
+ ResponseEntity<String> response =
+ restTemplate.exchange(
+ url, HttpMethod.valueOf(method.toUpperCase()), requestEntity, String.class);
+
+ return TaskExecutionResult.success(response.getBody());
+ } catch (RestClientException e) {
+ log.error("HTTP Task 실행 실패: TaskRunId={}, Error={}", taskRun.getId(), e.getMessage());
+ return TaskExecutionResult.failure(e.getMessage());
+ }
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java
new file mode 100644
index 00000000..a2b820bb
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java
@@ -0,0 +1,22 @@
+package site.icebang.domain.workflow.runner;
+
+import site.icebang.domain.execution.model.TaskRun;
+import site.icebang.domain.workflow.model.Task;
+
+public interface TaskRunner {
+ record TaskExecutionResult(String status, String message) {
+ public static TaskExecutionResult success(String message) {
+ return new TaskExecutionResult("SUCCESS", message);
+ }
+
+ public static TaskExecutionResult failure(String message) {
+ return new TaskExecutionResult("FAILED", message);
+ }
+
+ public boolean isFailure() {
+ return "FAILED".equals(status);
+ }
+ }
+
+ TaskExecutionResult execute(Task task, TaskRun taskRun);
+}
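
WorkflowExecutionService (below) injects a Map<String, TaskRunner>, which Spring populates with bean-name-to-bean entries, and resolves a runner as task.getType().toLowerCase() + "TaskRunner" - so a task of type HTTP finds the @Component("httpTaskRunner") above. Supporting a new task type is then just another bean following the same naming convention; a stub sketch (hypothetical, not part of this change):

    package site.icebang.domain.workflow.runner;

    import org.springframework.stereotype.Component;

    import site.icebang.domain.execution.model.TaskRun;
    import site.icebang.domain.workflow.model.Task;

    @Component("spring_batchTaskRunner") // matches "SPRING_BATCH".toLowerCase() + "TaskRunner"
    public class SpringBatchTaskRunner implements TaskRunner {

        @Override
        public TaskExecutionResult execute(Task task, TaskRun taskRun) {
            // hypothetical stub: would launch a Spring Batch job from task.getParameters()
            return TaskExecutionResult.failure("SPRING_BATCH runner not implemented yet");
        }
    }
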
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java
new file mode 100644
index 00000000..196c1fa0
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java
@@ -0,0 +1,24 @@
+package site.icebang.domain.workflow.scheduler;
+
+import org.quartz.JobExecutionContext;
+import org.springframework.scheduling.quartz.QuartzJobBean;
+import org.springframework.stereotype.Component;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+import site.icebang.domain.workflow.service.WorkflowExecutionService;
+
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class WorkflowTriggerJob extends QuartzJobBean {
+ private final WorkflowExecutionService workflowExecutionService;
+
+ @Override
+ protected void executeInternal(JobExecutionContext context) {
+ Long workflowId = context.getJobDetail().getJobDataMap().getLong("workflowId");
+ log.info("Quartz가 WorkflowTriggerJob을 실행합니다. WorkflowId={}", workflowId);
+ workflowExecutionService.executeWorkflow(workflowId);
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java
new file mode 100644
index 00000000..086b00de
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java
@@ -0,0 +1,111 @@
+package site.icebang.domain.workflow.service;
+
+import java.util.List;
+import java.util.Map;
+
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+import site.icebang.domain.execution.mapper.JobRunMapper;
+import site.icebang.domain.execution.mapper.TaskRunMapper;
+import site.icebang.domain.execution.mapper.WorkflowRunMapper;
+import site.icebang.domain.execution.model.JobRun;
+import site.icebang.domain.execution.model.TaskRun;
+import site.icebang.domain.execution.model.WorkflowRun;
+import site.icebang.domain.workflow.mapper.JobMapper;
+import site.icebang.domain.workflow.model.Job;
+import site.icebang.domain.workflow.model.Task;
+import site.icebang.domain.workflow.runner.TaskRunner;
+
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class WorkflowExecutionService {
+
+ private final JobMapper jobMapper;
+ private final WorkflowRunMapper workflowRunMapper;
+ private final JobRunMapper jobRunMapper;
+ private final TaskRunMapper taskRunMapper;
+ private final Map<String, TaskRunner> taskRunners;
+
+ /**
+ * Entry point for running a workflow. The entire run is wrapped in a single transaction.
+ *
+ * @param workflowId ID of the workflow to execute
+ */
+ @Transactional
+ public void executeWorkflow(Long workflowId) {
+ log.info("========== 워크플로우 실행 시작: WorkflowId={} ==========", workflowId);
+ WorkflowRun workflowRun = WorkflowRun.start(workflowId);
+ workflowRunMapper.insert(workflowRun);
+
+ List<Job> jobs = jobMapper.findJobsByWorkflowId(workflowId);
+ log.info("Executing {} jobs sequentially.", jobs.size());
+
+ for (Job job : jobs) {
+ JobRun jobRun = JobRun.start(workflowRun.getId(), job.getId());
+ jobRunMapper.insert(jobRun);
+ log.info(
+ "---------- Job run started: JobId={}, JobRunId={} ----------", job.getId(), jobRun.getId());
+
+ boolean jobSucceeded = executeTasksForJob(jobRun);
+
+ jobRun.finish(jobSucceeded ? "SUCCESS" : "FAILED");
+ jobRunMapper.update(jobRun);
+
+ if (!jobSucceeded) {
+ workflowRun.finish("FAILED");
+ workflowRunMapper.update(workflowRun);
+ log.error("Job 실패로 인해 워크플로우 실행을 중단합니다: WorkflowRunId={}", workflowRun.getId());
+ return; // Job이 실패하면 전체 워크플로우를 중단
+ }
+ log.info("---------- Job 실행 성공: JobRunId={} ----------", jobRun.getId());
+ }
+
+ workflowRun.finish("SUCCESS");
+ workflowRunMapper.update(workflowRun);
+ log.info("========== 워크플로우 실행 성공: WorkflowRunId={} ==========", workflowRun.getId());
+ }
+
+ /**
+ * Runs the tasks that belong to one job, in order.
+ *
+ * @param jobRun run record of the job currently executing
+ * @return true if every task succeeds, false as soon as one fails
+ */
+ private boolean executeTasksForJob(JobRun jobRun) {
+ List<Task> tasks = jobMapper.findTasksByJobId(jobRun.getJobId());
+ log.info("Job (JobRunId={}) has {} tasks to execute.", jobRun.getId(), tasks.size());
+
+ for (Task task : tasks) {
+ TaskRun taskRun = TaskRun.start(jobRun.getId(), task.getId());
+ taskRunMapper.insert(taskRun);
+ log.info("Task 실행 시작: TaskId={}, TaskRunId={}", task.getId(), taskRun.getId());
+
+ String runnerBeanName = task.getType().toLowerCase() + "TaskRunner";
+ TaskRunner runner = taskRunners.get(runnerBeanName);
+
+ if (runner == null) {
+ taskRun.finish("FAILED", "지원하지 않는 Task 타입: " + task.getType());
+ taskRunMapper.update(taskRun);
+ log.error("Task 실행 실패 (미지원 타입): TaskRunId={}, Type={}", taskRun.getId(), task.getType());
+ return false; // 실행할 Runner가 없으므로 실패
+ }
+
+ TaskRunner.TaskExecutionResult result = runner.execute(task, taskRun);
+ taskRun.finish(result.status(), result.message());
+ taskRunMapper.update(taskRun);
+
+ if (result.isFailure()) {
+ log.error("Task 실행 실패: TaskRunId={}, Message={}", taskRun.getId(), result.message());
+ return false; // Task가 실패하면 즉시 중단하고 실패 반환
+ }
+ log.info("Task 실행 성공: TaskRunId={}", taskRun.getId());
+ }
+
+ return true; // every task completed successfully
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java
new file mode 100644
index 00000000..71600b4b
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java
@@ -0,0 +1,28 @@
+package site.icebang.domain.workflow.service;
+
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+import lombok.RequiredArgsConstructor;
+
+import site.icebang.common.dto.PageParams;
+import site.icebang.common.dto.PageResult;
+import site.icebang.common.service.PageableService;
+import site.icebang.domain.workflow.dto.WorkflowCardDto;
+import site.icebang.domain.workflow.mapper.WorkflowMapper;
+
+@Service
+@RequiredArgsConstructor
+public class WorkflowService implements PageableService<WorkflowCardDto> {
+
+ private final WorkflowMapper workflowMapper;
+
+ @Override
+ @Transactional(readOnly = true)
+ public PageResult<WorkflowCardDto> getPagedResult(PageParams pageParams) {
+ return PageResult.from(
+ pageParams,
+ () -> workflowMapper.selectWorkflowList(pageParams),
+ () -> workflowMapper.selectWorkflowCount(pageParams));
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java b/apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java
new file mode 100644
index 00000000..8e2d26c3
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java
@@ -0,0 +1,3 @@
+package site.icebang.global.aop.logging;
+
+public class WorkflowLoggingAspect {}
diff --git a/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java
new file mode 100644
index 00000000..233f5834
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java
@@ -0,0 +1,33 @@
+package site.icebang.global.config;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.boot.CommandLineRunner;
+import org.springframework.stereotype.Component;
+import site.icebang.domain.schedule.model.Schedule;
+import site.icebang.domain.schedule.mapper.ScheduleMapper;
+import site.icebang.domain.schedule.service.QuartzScheduleService;
+import java.util.List;
+
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class QuartzSchedulerInitializer implements CommandLineRunner {
+
+ private final ScheduleMapper scheduleMapper;
+ private final QuartzScheduleService quartzScheduleService;
+
+ @Override
+ public void run(String... args) {
+ log.info("Quartz 스케줄러 초기화 시작: DB 스케줄을 등록합니다.");
+ try {
+ List activeSchedules = scheduleMapper.findAllActive();
+ for (Schedule schedule : activeSchedules) {
+ quartzScheduleService.addOrUpdateSchedule(schedule);
+ }
+ log.info("총 {}개의 활성 스케줄을 Quartz에 성공적으로 등록했습니다.", activeSchedules.size());
+ } catch (Exception e) {
+ log.error("Quartz 스케줄 초기화 중 오류가 발생했습니다.", e);
+ }
+ }
+}
\ No newline at end of file
diff --git a/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java
new file mode 100644
index 00000000..4079c9f3
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java
@@ -0,0 +1,56 @@
+package site.icebang.global.config.mybatis.typehandler;
+
+import java.sql.CallableStatement;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.apache.ibatis.type.BaseTypeHandler;
+import org.apache.ibatis.type.JdbcType;
+import org.apache.ibatis.type.MappedTypes;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+@MappedTypes(JsonNode.class)
+public class JsonNodeTypeHandler extends BaseTypeHandler<JsonNode> {
+
+ private static final ObjectMapper objectMapper = new ObjectMapper();
+
+ @Override
+ public void setNonNullParameter(
+ PreparedStatement ps, int i, JsonNode parameter, JdbcType jdbcType) throws SQLException {
+ try {
+ ps.setString(i, objectMapper.writeValueAsString(parameter));
+ } catch (JsonProcessingException e) {
+ throw new SQLException("Error converting JsonNode to String", e);
+ }
+ }
+
+ @Override
+ public JsonNode getNullableResult(ResultSet rs, String columnName) throws SQLException {
+ return parseJson(rs.getString(columnName));
+ }
+
+ @Override
+ public JsonNode getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
+ return parseJson(rs.getString(columnIndex));
+ }
+
+ @Override
+ public JsonNode getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
+ return parseJson(cs.getString(columnIndex));
+ }
+
+ private JsonNode parseJson(String json) throws SQLException {
+ if (json == null) {
+ return null;
+ }
+ try {
+ return objectMapper.readTree(json);
+ } catch (JsonProcessingException e) {
+ throw new SQLException("Error parsing JSON", e);
+ }
+ }
+}
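
With type-handlers-package pointed at this package (see the application.yml change below), MyBatis applies the handler automatically wherever a JsonNode property such as Task.parameters is read or written. The conversion itself is plain Jackson; a minimal illustration of the round trip:

    ObjectMapper om = new ObjectMapper();
    JsonNode original = om.readTree("{\"url\":\"http://example.com\",\"method\":\"POST\"}");
    String stored = om.writeValueAsString(original); // what setNonNullParameter writes
    JsonNode loaded = om.readTree(stored);           // what getNullableResult returns
    // original.equals(loaded) -> true: the structure survives the round trip
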
diff --git a/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java
deleted file mode 100644
index 79fc6436..00000000
--- a/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package site.icebang.global.config.scheduler;
-
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
-
-/** Configuration class that sets up the TaskScheduler bean used for dynamic scheduling */
-@Configuration
-public class SchedulerConfig {
-
- @Bean
- public TaskScheduler taskScheduler() {
- // Create a thread-pool-based TaskScheduler.
- ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
-
- // Set the size of the thread pool the scheduler uses.
- // This is the maximum number of scheduled jobs that can run concurrently.
- scheduler.setPoolSize(10);
-
- // Set a thread-name prefix to make log tracing easier.
- scheduler.setThreadNamePrefix("dynamic-scheduler-");
-
- // Initialize the scheduler.
- scheduler.initialize();
- return scheduler;
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java
index aba3ee3c..61d668cc 100644
--- a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java
+++ b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java
@@ -19,16 +19,23 @@
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
import lombok.RequiredArgsConstructor;
import site.icebang.domain.auth.service.AuthCredentialAdapter;
import site.icebang.global.config.security.endpoints.SecurityEndpoints;
+import site.icebang.global.handler.exception.RestAccessDeniedHandler;
+import site.icebang.global.handler.exception.RestAuthenticationEntryPoint;
@Configuration
@RequiredArgsConstructor
public class SecurityConfig {
private final Environment environment;
private final AuthCredentialAdapter userDetailsService;
+ private final ObjectMapper objectMapper;
+ private final RestAuthenticationEntryPoint restAuthenticationEntryPoint;
+ private final RestAccessDeniedHandler restAccessDeniedHandler;
@Bean
public AuthenticationProvider authenticationProvider() {
@@ -57,6 +64,8 @@ public SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
.permitAll()
.requestMatchers("/auth/login", "/auth/logout")
.permitAll()
+ .requestMatchers("/v0/workflows/**")
+ .permitAll()
.requestMatchers("/v0/auth/check-session")
.authenticated()
.requestMatchers(SecurityEndpoints.DATA_ADMIN.getMatchers())
@@ -97,6 +106,10 @@ public SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
.logout(
logout -> logout.logoutUrl("/auth/logout").logoutSuccessUrl("/auth/login").permitAll())
.csrf(AbstractHttpConfigurer::disable)
+ .exceptionHandling(
+ ex ->
+ ex.authenticationEntryPoint(restAuthenticationEntryPoint)
+ .accessDeniedHandler(restAccessDeniedHandler))
.build();
}
diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java
index 019337dc..e6e24243 100644
--- a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java
+++ b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java
@@ -11,7 +11,8 @@ public enum SecurityEndpoints {
"/js/**",
"/images/**",
"/v0/organizations/**",
- "/v0/auth/register"),
+ "/v0/auth/register",
+ "/v0/check-execution-log-insert"),
// data administration endpoints
DATA_ADMIN("/admin/**", "/api/admin/**", "/management/**", "/actuator/**"),
@@ -26,7 +27,7 @@ public enum SecurityEndpoints {
OPS("/api/scheduler/**", "/api/monitoring/**"),
// general user endpoints
- USER("/user/**", "/profile/**", "/v0/auth/check-session");
+ USER("/user/**", "/profile/**", "/v0/auth/check-session", "/v0/workflows/**");
private final String[] patterns;
diff --git a/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java
deleted file mode 100644
index e89f2d80..00000000
--- a/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package site.icebang.global.filter;
-
-import java.io.IOException;
-import java.util.UUID;
-
-import org.slf4j.MDC;
-import org.springframework.stereotype.Component;
-import org.springframework.web.filter.OncePerRequestFilter;
-
-import jakarta.servlet.FilterChain;
-import jakarta.servlet.ServletException;
-import jakarta.servlet.http.HttpServletRequest;
-import jakarta.servlet.http.HttpServletResponse;
-
-@Component
-public class LoggingFilter extends OncePerRequestFilter {
-
- public static final String TRACE_ID_HEADER = "X-Request-ID";
-
- @Override
- protected void doFilterInternal(
- HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
- throws ServletException, IOException {
-
- // Check whether another system already passed along a Trace ID
- String traceId = request.getHeader(TRACE_ID_HEADER);
-
- // If not, generate a new one (this request is the starting point)
- if (traceId == null || traceId.isEmpty()) {
- traceId = UUID.randomUUID().toString();
- }
-
- MDC.put("traceId", traceId.substring(0, 8));
-
- // ⭐️ Store the traceId as a request attribute so controllers and other layers can use it
- request.setAttribute("X-Request-ID", traceId);
-
- // Returning the traceId in a response header makes client-side tracing easier
- response.setHeader(TRACE_ID_HEADER, traceId);
-
- filterChain.doFilter(request, response);
- }
-}
diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java
new file mode 100644
index 00000000..8243acde
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java
@@ -0,0 +1,58 @@
+package site.icebang.global.handler.exception;
+
+import org.springframework.http.HttpStatus;
+import org.springframework.security.access.AccessDeniedException;
+import org.springframework.security.core.AuthenticationException;
+import org.springframework.web.bind.MethodArgumentNotValidException;
+import org.springframework.web.bind.annotation.ExceptionHandler;
+import org.springframework.web.bind.annotation.ResponseStatus;
+import org.springframework.web.bind.annotation.RestControllerAdvice;
+import org.springframework.web.servlet.resource.NoResourceFoundException;
+
+import lombok.extern.slf4j.Slf4j;
+
+import site.icebang.common.dto.ApiResponse;
+import site.icebang.common.exception.DuplicateDataException;
+
+@RestControllerAdvice
+@Slf4j
+public class GlobalExceptionHandler {
+ @ExceptionHandler(MethodArgumentNotValidException.class)
+ @ResponseStatus(HttpStatus.BAD_REQUEST)
+ public ApiResponse<Void> handleValidation(MethodArgumentNotValidException ex) {
+ String detail = ex.getBindingResult().toString();
+ return ApiResponse.error("Validation failed: " + detail, HttpStatus.BAD_REQUEST);
+ }
+
+ @ExceptionHandler(Exception.class)
+ @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
+ public ApiResponse<Void> handleGeneric(Exception ex) {
+ log.error(ex.getMessage(), ex);
+ return ApiResponse.error("Internal error", HttpStatus.INTERNAL_SERVER_ERROR);
+ }
+
+ @ExceptionHandler(NoResourceFoundException.class)
+ @ResponseStatus(HttpStatus.NOT_FOUND)
+ public ApiResponse<Void> handleNotFound(NoResourceFoundException ex) {
+ return ApiResponse.error("Not found: " + ex.getMessage(), HttpStatus.NOT_FOUND);
+ }
+
+ @ExceptionHandler(AuthenticationException.class)
+ @ResponseStatus(HttpStatus.UNAUTHORIZED)
+ public ApiResponse<Void> handleAuthentication(AuthenticationException ex) {
+ return ApiResponse.error("Authentication failed: " + ex.getMessage(), HttpStatus.UNAUTHORIZED);
+ }
+
+ @ExceptionHandler(AccessDeniedException.class)
+ @ResponseStatus(HttpStatus.FORBIDDEN)
+ public ApiResponse<Void> handleAccessDenied(AccessDeniedException ex) {
+ return ApiResponse.error("Access denied: " + ex.getMessage(), HttpStatus.FORBIDDEN);
+ }
+
+ @ExceptionHandler(DuplicateDataException.class)
+ @ResponseStatus(HttpStatus.CONFLICT)
+ public ApiResponse<Void> handleDuplicateData(DuplicateDataException ex) {
+ log.warn(ex.getMessage(), ex);
+ return ApiResponse.error("Duplicate: " + ex.getMessage(), HttpStatus.CONFLICT);
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java
new file mode 100644
index 00000000..efeffde1
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java
@@ -0,0 +1,33 @@
+package site.icebang.global.handler.exception;
+
+import java.io.IOException;
+
+import org.springframework.http.HttpStatus;
+import org.springframework.security.access.AccessDeniedException;
+import org.springframework.security.web.access.AccessDeniedHandler;
+import org.springframework.stereotype.Component;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import lombok.RequiredArgsConstructor;
+
+import site.icebang.common.dto.ApiResponse;
+
+@Component
+@RequiredArgsConstructor
+public class RestAccessDeniedHandler implements AccessDeniedHandler {
+ private final ObjectMapper objectMapper;
+
+ @Override
+ public void handle(
+ HttpServletRequest request, HttpServletResponse response, AccessDeniedException ex)
+ throws IOException {
+ ApiResponse body = ApiResponse.error("Access denied", HttpStatus.FORBIDDEN);
+
+ response.setStatus(HttpServletResponse.SC_FORBIDDEN);
+ response.setContentType("application/json;charset=UTF-8");
+ response.getWriter().write(objectMapper.writeValueAsString(body));
+ }
+}
diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java
new file mode 100644
index 00000000..b7c50d76
--- /dev/null
+++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java
@@ -0,0 +1,34 @@
+package site.icebang.global.handler.exception;
+
+import java.io.IOException;
+
+import org.springframework.http.HttpStatus;
+import org.springframework.security.core.AuthenticationException;
+import org.springframework.security.web.AuthenticationEntryPoint;
+import org.springframework.stereotype.Component;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import lombok.RequiredArgsConstructor;
+
+import site.icebang.common.dto.ApiResponse;
+
+@Component
+@RequiredArgsConstructor
+public class RestAuthenticationEntryPoint implements AuthenticationEntryPoint {
+ private final ObjectMapper objectMapper;
+
+ @Override
+ public void commence(
+ HttpServletRequest request, HttpServletResponse response, AuthenticationException ex)
+ throws IOException {
+ ApiResponse<Void> body =
+ ApiResponse.error("Authentication required", HttpStatus.UNAUTHORIZED);
+
+ response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
+ response.setContentType("application/json;charset=UTF-8");
+ response.getWriter().write(objectMapper.writeValueAsString(body));
+ }
+}
diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml
index e7bc3f09..336d62ae 100644
--- a/apps/user-service/src/main/resources/application-develop.yml
+++ b/apps/user-service/src/main/resources/application-develop.yml
@@ -33,8 +33,23 @@ spring:
- classpath:sql/00-truncate.sql
- classpath:sql/01-insert-internal-users.sql
- classpath:sql/02-insert-external-users.sql
+ - classpath:sql/03-insert-workflow.sql
encoding: UTF-8
+# # Spring Quartz scheduler settings
+# quartz:
+# job-store-type: jdbc
+# auto-startup: true
+# jdbc:
+# initialize-schema: embedded # default chosen with the production environment in mind
+# properties:
+# org.quartz.scheduler.instanceId: AUTO
+# org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX
+# org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate
+# org.quartz.jobStore.tablePrefix: QRTZ_ # Quartz table prefix
+# org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool
+# org.quartz.threadPool.threadCount: 5 # thread count for the dev environment
+
mybatis:
mapper-locations: classpath:mybatis/mapper/**/*.xml
type-aliases-package: site.icebang.dto
@@ -43,3 +58,9 @@ mybatis:
logging:
config: classpath:log4j2-develop.yml
+
+management:
+ tracing:
+ enabled: true
+ sampling:
+ probability: 1.0 # 100% sampling (recommended only for development)
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/application-production.yml b/apps/user-service/src/main/resources/application-production.yml
index 6b048fbd..032954ad 100644
--- a/apps/user-service/src/main/resources/application-production.yml
+++ b/apps/user-service/src/main/resources/application-production.yml
@@ -17,6 +17,10 @@ spring:
minimum-idle: 5
pool-name: HikariCP-MyBatis
+# quartz:
+# jdbc:
+# initialize-schema: never
+
mybatis:
mapper-locations: classpath:mybatis/mapper/**/*.xml
type-aliases-package: site.icebang.dto
diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml b/apps/user-service/src/main/resources/application-test-e2e.yml
index f7dceba9..3a777909 100644
--- a/apps/user-service/src/main/resources/application-test-e2e.yml
+++ b/apps/user-service/src/main/resources/application-test-e2e.yml
@@ -18,4 +18,4 @@ mybatis:
map-underscore-to-camel-case: true
logging:
- config: classpath:log4j2-production.yml
\ No newline at end of file
+ config: classpath:log4j2-test-e2e.yml
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml
index 0ed34f36..526cf151 100644
--- a/apps/user-service/src/main/resources/application-test-integration.yml
+++ b/apps/user-service/src/main/resources/application-test-integration.yml
@@ -39,4 +39,4 @@ mybatis:
map-underscore-to-camel-case: true
logging:
- config: classpath:log4j2-develop.yml
\ No newline at end of file
+ config: classpath:log4j2-test-unit.yml
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml
index d6f68b0e..706eceea 100644
--- a/apps/user-service/src/main/resources/application.yml
+++ b/apps/user-service/src/main/resources/application.yml
@@ -7,10 +7,11 @@ spring:
context:
cache:
maxSize: 1
+
mybatis:
# location of the mapper XML files
mapper-locations: classpath:mapper/**/*.xml
- type-handlers-package: site.icebang.config.mybatis.typehandler
+ type-handlers-package: site.icebang.global.config.mybatis.typehandler
# configuration section for external API integration
api:
diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml
index f900c3b1..21790eea 100644
--- a/apps/user-service/src/main/resources/log4j2-develop.yml
+++ b/apps/user-service/src/main/resources/log4j2-develop.yml
@@ -1,133 +1,156 @@
Configuration:
+ status: DEBUG
name: develop
properties:
property:
+ - name: "app-name"
+ value: "user-service"
- name: "log-path"
- value: "./logs"
+ value: "./docker/local/logs"
- name: "charset-UTF-8"
value: "UTF-8"
- # unified console pattern - RequestId included in every log line
+ # console pattern for the DEBUG environment - simpler and easier to read
- name: "console-layout-pattern"
- value: "%highlight{[%-5level]} [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %n %msg%n%n"
- # detailed file pattern - RequestId included
+ value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss} [%t] %n %logger{20} - %msg%n%n "
+ # file pattern - structured so Promtail can parse it easily (uses UTC timestamps)
- name: "file-layout-pattern"
- value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"
- # log file paths
- - name: "info-log"
- value: ${log-path}/user-service/info.log
+ value: "[%X{traceId}] [%X{spanId}] %d{yyyy-MM-dd HH:mm:ss.SSS}{UTC} [%t] %-5level %logger{36} - %msg%n"
+ # log files for the development environment - set absolute or relative paths
+ - name: "dev-log"
+ value: ${log-path}/develop/app.log
- name: "error-log"
- value: ${log-path}/user-service/error.log
- - name: "auth-log"
- value: ${log-path}/user-service/auth.log
- - name: "json-log"
- value: ${log-path}/user-service/json-info.log
+ value: ${log-path}/develop/error.log
- # [Appenders] defines how log records are written
Appenders:
- # unified console output
+ # console output - the primary output during development
Console:
name: console-appender
target: SYSTEM_OUT
PatternLayout:
pattern: ${console-layout-pattern}
- # rolling file log
- RollingFile:
- name: rolling-file-appender
- fileName: ${log-path}/rolling-file.log
- filePattern: "logs/archive/rolling-file.log.%d{yyyy-MM-dd-hh-mm}_%i.gz"
- PatternLayout:
- charset: ${charset-UTF-8}
- pattern: ${file-layout-pattern}
- Policies:
- SizeBasedTriggeringPolicy:
- size: "200KB"
- TimeBasedTriggeringPolicy:
- interval: "1"
- DefaultRollOverStrategy:
- max: "30"
- fileIndex: "max"
-
- # File logs
+ # JDBC appender - for workflow execution logs
+ JDBC:
+ name: workflow-appender
+ tableName: "execution_log"
+ bufferSize: 0
+ ignoreExceptions: false
+
+ DriverManager:
+ connectionString: "jdbc:mariadb://localhost:3306/pre_process"
+ driverClassName: "org.mariadb.jdbc.Driver"
+ userName: "mariadb"
+ password: "qwer1234"
+
+ ColumnMapping:
+ - name: "execution_type"
+ pattern: "%X{executionType}"
+ - name: "source_id"
+ pattern: "%X{sourceId}"
+ - name: "log_level"
+ pattern: "%level"
+ - name: "executed_at"
+ pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}"
+ - name: "log_message"
+ pattern: "%message"
+ - name: "trace_id"
+ pattern: "%X{traceId}"
+ - name: "reserved1"
+ pattern: "%X{spanId}"
+
+ # File appenders - Promtail tails these files
File:
- - name: file-info-appender
- fileName: ${info-log}
+ - name: file-dev-appender
+ fileName: ${dev-log}
PatternLayout:
pattern: ${file-layout-pattern}
+ # Log rotation settings (optional)
+ # Policies:
+ # SizeBasedTriggeringPolicy:
+ # size: 10MB
+ # DefaultRolloverStrategy:
+ # max: 10
+
- name: file-error-appender
fileName: ${error-log}
PatternLayout:
pattern: ${file-layout-pattern}
- - name: file-auth-appender
- fileName: ${auth-log}
- PatternLayout:
- pattern: ${file-layout-pattern}
- - name: file-json-info-appender
- fileName: ${json-log}
- PatternLayout:
- pattern: ${file-layout-pattern}
+ ThresholdFilter:
+ level: ERROR
- # [Loggers] defines logging scopes
Loggers:
- # [Loggers - Root] the top-level logger that records everything
+ # Root logger
Root:
- level: OFF
+ level: INFO
AppenderRef:
- ref: console-appender
- - ref: rolling-file-appender
- # [Loggers - Loggers] per-package or per-class loggers
Logger:
- # 1. Spring Framework logs
- - name: org.springframework
+ # Application logs - written to files only (collected by Promtail)
+ - name: site.icebang
additivity: "false"
level: DEBUG
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
+ - ref: file-dev-appender
- ref: file-error-appender
- # 2. Application logs
- - name: site.icebang
+ - name: "WORKFLOW_HISTORY"
+ level: DEBUG
additivity: "false"
- level: TRACE
AppenderRef:
+ - ref: workflow-appender
- ref: console-appender
- - ref: file-info-appender
+ - ref: file-dev-appender
- ref: file-error-appender
- # 3. Disable HikariCP logging
- - name: com.zaxxer.hikari
- level: OFF
+ # Spring Framework
+ - name: org.springframework
+ additivity: "false"
+ level: INFO
+ AppenderRef:
+ - ref: console-appender
+ - ref: file-dev-appender
- # 4. Spring Security logs - important for tracing authentication/authorization
+ # Spring Security
- name: org.springframework.security
level: DEBUG
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-auth-appender
+ - ref: file-dev-appender
- # 5. Web request logs - traces request handling
+ # Web request logs
- name: org.springframework.web
level: DEBUG
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
+ - ref: file-dev-appender
- # 6. Transaction logs - traces DB work
+ # Transaction logs
- name: org.springframework.transaction
level: DEBUG
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
+ - ref: file-dev-appender
- - name: site.icebang.domain.auth.mapper
+ # Disable HikariCP logging
+ - name: com.zaxxer.hikari
+ level: "OFF"
+
+ # SQL logs
+ - name: org.hibernate.SQL
level: DEBUG
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
\ No newline at end of file
+
+ # Parameter binding logs
+ - name: org.hibernate.type.descriptor.sql.BasicBinder
+ level: TRACE
+ additivity: "false"
+ AppenderRef:
+ - ref: console-appender
\ No newline at end of file
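
Note: the JDBC appender above writes one execution_log row per WORKFLOW_HISTORY event, pulling column values from the MDC via %X{...}. Below is a minimal sketch of the producing side, assuming log4j2-api on the classpath; the class name and MDC values are illustrative, not part of this change:

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.ThreadContext;

public class WorkflowHistoryLoggingSketch {
    // Named logger matching the "WORKFLOW_HISTORY" Logger entry above
    private static final Logger WORKFLOW_HISTORY = LogManager.getLogger("WORKFLOW_HISTORY");

    public static void main(String[] args) {
        // MDC keys read by the JDBC appender's ColumnMapping (%X{executionType}, %X{sourceId}, %X{traceId})
        ThreadContext.put("executionType", "task");
        ThreadContext.put("sourceId", "42");
        ThreadContext.put("traceId", java.util.UUID.randomUUID().toString());
        try {
            // Each event becomes one row in execution_log (and also goes to the console/file appenders)
            WORKFLOW_HISTORY.info("task started");
        } finally {
            ThreadContext.clearMap(); // avoid leaking MDC values across pooled threads
        }
    }
}
```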
diff --git a/apps/user-service/src/main/resources/log4j2-production.yml b/apps/user-service/src/main/resources/log4j2-production.yml
index 31393458..2e88bd19 100644
--- a/apps/user-service/src/main/resources/log4j2-production.yml
+++ b/apps/user-service/src/main/resources/log4j2-production.yml
@@ -1,126 +1,195 @@
Configuration:
- name: develop
+ status: INFO
+ name: production
properties:
property:
+ - name: "app-name"
+ value: "user-service"
- name: "log-path"
value: "./logs"
- name: "charset-UTF-8"
value: "UTF-8"
- # Unified console pattern - RequestId included in every log line
+ # Console pattern for production - structured logs
- name: "console-layout-pattern"
- value: "%highlight{[%-5level]} [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %n %msg%n%n"
- # Detailed file pattern - includes RequestId
+ value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss}{UTC} [%t] %logger{20} - %msg%n"
+ # File pattern - structured for easy Promtail parsing (UTC timestamps)
- name: "file-layout-pattern"
- value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"
- # Log file paths
- - name: "info-log"
- value: ${log-path}/user-service/info.log
+ value: "[%X{traceId}] [%X{spanId}] %d{yyyy-MM-dd HH:mm:ss.SSS}{UTC} [%t] %-5level %logger{36} - %msg%n"
+ # Production log files
+ - name: "prod-log"
+ value: ${log-path}/production/app.log
- name: "error-log"
- value: ${log-path}/user-service/error.log
- - name: "auth-log"
- value: ${log-path}/user-service/auth.log
- - name: "json-log"
- value: ${log-path}/user-service/json-info.log
+ value: ${log-path}/production/error.log
- # [Appenders] defines how logs are written
Appenders:
- # Unified console output
+ # Console output - minimal information in production
Console:
name: console-appender
target: SYSTEM_OUT
PatternLayout:
pattern: ${console-layout-pattern}
- # Rolling file log
- RollingFile:
- name: rolling-file-appender
- fileName: ${log-path}/rolling-file.log
- filePattern: "logs/archive/rolling-file.log.%d{yyyy-MM-dd-hh-mm}_%i.gz"
- PatternLayout:
- charset: ${charset-UTF-8}
- pattern: ${file-layout-pattern}
- Policies:
- SizeBasedTriggeringPolicy:
- size: "200KB"
- TimeBasedTriggeringPolicy:
- interval: "1"
- DefaultRollOverStrategy:
- max: "30"
- fileIndex: "max"
-
- # File logs
+ # Loki appender - for production monitoring
+ Loki:
+ name: loki-appender
+ url: ${env:LOKI_URL} # Grafana Cloud Loki URL (env lookup, matching the ${env:...} refs in the JDBC appender below)
+ basicAuthUsername: ${env:LOKI_USERNAME} # Grafana Cloud username
+ basicAuthPassword: ${env:LOKI_PASSWORD} # Grafana Cloud API key
+ JsonLayout:
+ compact: true
+ eventEol: true
+ includeStacktrace: true
+ KeyValuePair:
+ - key: "app"
+ value: "${app-name}"
+ - key: "env"
+ value: "production"
+ Label:
+ - name: "app"
+ value: "${app-name}"
+ - name: "env"
+ value: "production"
+ - name: "traceId"
+ value: "${ctx:traceId}"
+ - name: "spanId"
+ value: "${ctx:spanId}"
+ - name: "executionType"
+ value: "${ctx:executionType:-application}"
+ - name: "sourceId"
+ value: "${ctx:sourceId}"
+ - name: "runId"
+ value: "${ctx:runId}"
+
+ JDBC:
+ name: workflow-appender
+ tableName: "execution_log"
+ bufferSize: 0
+ ignoreExceptions: false
+
+ DriverManager:
+ connectionString: "jdbc:mariadb://${env:DB_HOST}:${env:DB_PORT}/${env:DB_NAME}"
+ driverClassName: "org.mariadb.jdbc.Driver"
+ userName: "${env:DB_USER}"
+ password: "${env:DB_PASS}"
+
+ ColumnMapping:
+ - name: "execution_type"
+ pattern: "%X{executionType}"
+ - name: "source_id"
+ pattern: "%X{sourceId}"
+ - name: "log_level"
+ pattern: "%level"
+ - name: "executed_at"
+ pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}"
+ - name: "log_message"
+ pattern: "%message"
+ - name: "trace_id"
+ pattern: "%X{traceId}"
+ - name: "reserved1"
+ pattern: "%X{spanId}"
+
- File:
+ # RollingFile (not File) so the rolling Policies below actually take effect
+ RollingFile:
- - name: file-info-appender
- fileName: ${info-log}
+ - name: file-prod-appender
+ fileName: ${prod-log}
+ filePattern: ${log-path}/production/app.%d{yyyy-MM-dd}.%i.log
PatternLayout:
pattern: ${file-layout-pattern}
+ # Log file rolling settings
+ Policies:
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ modulate: true
+ SizeBasedTriggeringPolicy:
+ size: "100 MB"
+ DefaultRolloverStrategy:
+ max: 30
+
- name: file-error-appender
fileName: ${error-log}
+ filePattern: ${log-path}/production/error.%d{yyyy-MM-dd}.%i.log
PatternLayout:
pattern: ${file-layout-pattern}
- - name: file-auth-appender
- fileName: ${auth-log}
- PatternLayout:
- pattern: ${file-layout-pattern}
- - name: file-json-info-appender
- fileName: ${json-log}
- PatternLayout:
- pattern: ${file-layout-pattern}
+ ThresholdFilter:
+ level: ERROR
+ Policies:
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ modulate: true
+ SizeBasedTriggeringPolicy:
+ size: "100 MB"
+ DefaultRolloverStrategy:
+ max: 30
- # [Loggers] defines logging scopes
Loggers:
- # [Loggers - Root] the top-level logger that records everything
+ # Root logger - WARN level in production
Root:
- level: OFF
+ level: WARN
AppenderRef:
- ref: console-appender
- - ref: rolling-file-appender
- # [Loggers - Loggers] per-package or per-class loggers
Logger:
- # 1. Spring Framework logs
- - name: org.springframework
+ # Application logs - INFO level in production
+ - name: site.icebang
additivity: "false"
- level: DEBUG
+ level: INFO
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
+ - ref: loki-appender
+ - ref: file-prod-appender
- ref: file-error-appender
- # 2. Application logs
- - name: site.icebang
+ - name: "WORKFLOW_HISTORY"
+ level: INFO
additivity: "false"
- level: TRACE
AppenderRef:
+ - ref: workflow-appender
+ - ref: loki-appender
- ref: console-appender
- - ref: file-info-appender
+ - ref: file-prod-appender
- ref: file-error-appender
- # 3. Disable HikariCP logging
- - name: com.zaxxer.hikari
- level: OFF
+ # Spring Framework - WARN level in production
+ - name: org.springframework
+ additivity: "false"
+ level: WARN
+ AppenderRef:
+ - ref: console-appender
+ - ref: file-prod-appender
- # 4. Spring Security logs - important for tracing authentication/authorization
+ # Spring Security - WARN level in production
- name: org.springframework.security
- level: DEBUG
+ level: WARN
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-auth-appender
+ - ref: file-prod-appender
- # 5. Web request logs - traces request handling
+ # Web request logs - INFO level in production
- name: org.springframework.web
- level: DEBUG
+ level: INFO
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
+ - ref: file-prod-appender
+ - ref: loki-appender
- # 6. Transaction logs - traces DB work
+ # Transaction logs - WARN level in production
- name: org.springframework.transaction
- level: DEBUG
+ level: WARN
additivity: "false"
AppenderRef:
- ref: console-appender
- - ref: file-info-appender
\ No newline at end of file
+ - ref: file-prod-appender
+
+ # Disable HikariCP logging
+ - name: com.zaxxer.hikari
+ level: "OFF"
+
+ # SQL logging disabled - turned off in production for performance
+ - name: org.hibernate.SQL
+ level: "OFF"
+
+ # Parameter binding logging disabled
+ - name: org.hibernate.type.descriptor.sql.BasicBinder
+ level: "OFF"
\ No newline at end of file
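
The Loki appender's Label entries resolve ${ctx:traceId}/${ctx:spanId} from the same MDC that the file patterns read via %X{...}, so those keys must be populated per request. A hedged sketch of one way to do that, assuming a jakarta.servlet stack; the filter name and ID formats are assumptions, not part of this change:

```java
import java.io.IOException;
import java.util.UUID;

import jakarta.servlet.Filter;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;

import org.apache.logging.log4j.ThreadContext;

// Hypothetical filter: the project may already populate these keys elsewhere.
public class TraceContextFilter implements Filter {
    @Override
    public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
            throws IOException, ServletException {
        ThreadContext.put("traceId", UUID.randomUUID().toString());
        ThreadContext.put("spanId", Long.toHexString(System.nanoTime()));
        try {
            chain.doFilter(req, res); // every log line in this request now carries both IDs
        } finally {
            ThreadContext.clearMap(); // the servlet thread is pooled; always clean up
        }
    }
}
```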
diff --git a/apps/user-service/src/main/resources/log4j2-test-e2e.yml b/apps/user-service/src/main/resources/log4j2-test-e2e.yml
new file mode 100644
index 00000000..53acccf2
--- /dev/null
+++ b/apps/user-service/src/main/resources/log4j2-test-e2e.yml
@@ -0,0 +1,183 @@
+Configuration:
+ status: DEBUG
+ name: e2e
+
+ properties:
+ property:
+ - name: "app-name"
+ value: "user-service"
+ - name: "log-path"
+ value: "./logs"
+ - name: "charset-UTF-8"
+ value: "UTF-8"
+ # Console pattern for the DEBUG environment - simpler and more readable
+ - name: "console-layout-pattern"
+ value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss} [%t] %n %logger{20} - %msg%n%n "
+ # File pattern
+ - name: "file-layout-pattern"
+ value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"
+ # Development log files
+ - name: "dev-log"
+ value: ${log-path}/develop/app.log
+ - name: "error-log"
+ value: ${log-path}/develop/error.log
+
+ Appenders:
+ # Console output - the primary output during development
+ Console:
+ name: console-appender
+ target: SYSTEM_OUT
+ PatternLayout:
+ pattern: ${console-layout-pattern}
+ disableAnsi: false
+
+ Loki:
+ name: loki-appender
+ host: localhost
+ port: "${sys:loki.port}" # system property published by E2eTestConfiguration
+ JsonLayout:
+ compact: true
+ eventEol: true
+ includeStacktrace: true
+ KeyValuePair:
+ - key: "app"
+ value: "${app-name}"
+ - key: "env"
+ value: "test-e2e"
+ Label:
+ - name: "app"
+ value: "${app-name}"
+ - name: "env"
+ value: "test-e2e"
+ - name: "traceId"
+ value: "${ctx:traceId}"
+ - name: "spanId"
+ value: "${ctx:spanId}"
+ - name: "executionType"
+ value: "${ctx:executionType:-application}"
+ - name: "sourceId"
+ value: "${ctx:sourceId}"
+ - name: "runId"
+ value: "${ctx:runId}"
+
+ JDBC:
+ name: workflow-appender
+ tableName: "execution_log"
+ bufferSize: 0
+ ignoreExceptions: false
+
+ DriverManager:
+ connectionString: ${DriverManager.connectionString}
+ driverClassName: ${DriverManager.driverClassName}
+ userName: ${DriverManager.userName}
+ password: ${DriverManager.password}
+
+ ColumnMapping:
+ - name: "execution_type"
+ pattern: "%X{executionType}"
+ - name: "source_id"
+ pattern: "%X{sourceId}"
+ - name: "log_level"
+ pattern: "%level"
+ - name: "executed_at"
+ pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}" # 패턴으로 시간 직접 지정
+ - name: "log_message"
+ pattern: "%message"
+ - name: "trace_id"
+ pattern: "%X{traceId}"
+ - name: "reserved1"
+ pattern: "%X{spanId}"
+
+ # General log files for development
+ File:
+ - name: file-dev-appender
+ fileName: ${dev-log}
+ PatternLayout:
+ pattern: ${file-layout-pattern}
+ - name: file-error-appender
+ fileName: ${error-log}
+ PatternLayout:
+ pattern: ${file-layout-pattern}
+ ThresholdFilter:
+ level: ERROR
+
+ Loggers:
+ # Root logger - INFO level by default in development
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: console-appender
+
+ Logger:
+ # Application logs - all levels in development, also shipped to Loki
+ - name: site.icebang
+ additivity: false
+ level: DEBUG
+ AppenderRef:
+ - ref: console-appender
+ - ref: loki-appender
+ - ref: file-dev-appender
+ - ref: file-error-appender
+
+ - name: "WORKFLOW_HISTORY"
+ level: DEBUG
+ additivity: "false"
+ AppenderRef:
+ - ref: workflow-appender
+ - ref: loki-appender
+ - ref: console-appender
+ - ref: file-dev-appender
+ - ref: file-error-appender
+
+ # Spring Framework - only what is needed during development
+ - name: org.springframework
+ additivity: false
+ level: INFO
+ AppenderRef:
+ - ref: console-appender
+ - ref: file-dev-appender
+
+ # Spring Security - for auth debugging
+ - name: org.springframework.security
+ level: DEBUG
+ additivity: false
+ AppenderRef:
+ - ref: console-appender
+ - ref: file-dev-appender
+ - ref: loki-appender
+
+ # Web request logs - useful during API development
+ - name: org.springframework.web
+ level: DEBUG
+ additivity: false
+ AppenderRef:
+ - ref: console-appender
+ - ref: file-dev-appender
+ - ref: loki-appender
+
+ # Transaction logs - for debugging DB work
+ - name: org.springframework.transaction
+ level: DEBUG
+ additivity: false
+ AppenderRef:
+ - ref: console-appender
+ - ref: file-dev-appender
+ - ref: loki-appender
+
+ # Disable HikariCP logging
+ - name: com.zaxxer.hikari
+ level: "OFF"
+
+ # SQL logs - for inspecting queries during development (enable as needed)
+ - name: org.hibernate.SQL
+ level: DEBUG
+ additivity: false
+ AppenderRef:
+ - ref: console-appender
+
+ # Parameter binding logs (enable as needed)
+ - name: org.hibernate.type.descriptor.sql.BasicBinder
+ level: TRACE
+ additivity: false
+ AppenderRef:
+ - ref: console-appender
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml
new file mode 100644
index 00000000..54e29ae4
--- /dev/null
+++ b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
+        "https://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!-- The mapper namespace, resultMap definitions, and query element tags were lost
+     when this diff was extracted; only the SQL bodies below survived. -->
+ SELECT j.* FROM job j
+ JOIN workflow_job wj ON j.id = wj.job_id
+ WHERE wj.workflow_id = #{workflowId}
+ ORDER BY wj.execution_order ASC
+
+
+
+ SELECT t.* FROM task t
+ JOIN job_task jt ON t.id = jt.task_id
+ WHERE jt.job_id = #{jobId}
+ ORDER BY jt.execution_order ASC
+
+
\ No newline at end of file
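
The element markup of this mapper did not survive extraction, but the two queries imply a mapper interface roughly like the sketch below; the method names, parameter names, and Map-typed rows are assumptions, not taken from the project:

```java
import java.util.List;
import java.util.Map;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

// Hypothetical interface inferred from the two surviving queries above.
@Mapper
public interface JobMapper {
    // SELECT j.* ... WHERE wj.workflow_id = #{workflowId} ORDER BY wj.execution_order ASC
    List<Map<String, Object>> selectJobsByWorkflowId(@Param("workflowId") Long workflowId);

    // SELECT t.* ... WHERE jt.job_id = #{jobId} ORDER BY jt.execution_order ASC
    List<Map<String, Object>> selectTasksByJobId(@Param("jobId") Long jobId);
}
```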
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml
new file mode 100644
index 00000000..4fd0ea3d
--- /dev/null
+++ b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
+        "https://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!-- The mapper namespace, resultMap definitions, and statement element tags were lost
+     when this diff was extracted; only the SQL bodies below survived. -->
+ INSERT INTO job_run (workflow_run_id, job_id, status, started_at, created_at)
+ VALUES (#{workflowRunId}, #{jobId}, #{status}, #{startedAt}, #{createdAt})
+
+
+
+ UPDATE job_run
+ SET status = #{status},
+ finished_at = #{finishedAt}
+ WHERE id = #{id}
+
+
+
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml
index 3cdcc90e..2a5480e3 100644
--- a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml
+++ b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml
@@ -1,17 +1,11 @@
-<!-- old select element markup (lost in extraction) -->
+<!-- new mapper prologue and select element markup (lost in extraction) -->
- SELECT
- id AS scheduleId,
- workflow_id AS workflowId,
- cron_expression AS cronExpression,
- is_active AS isActive
- FROM
- schedule
- WHERE
- is_active = #{isActive}
+ SELECT * FROM schedule WHERE is_active = true
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml
new file mode 100644
index 00000000..7604cb94
--- /dev/null
+++ b/apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
+        "https://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!-- The mapper namespace, resultMap definition, and select element tags were lost
+     when this diff was extracted; only the SQL body below survived. -->
+ SELECT * FROM task
+ WHERE id = #{taskId}
+
+
+
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml
new file mode 100644
index 00000000..582af278
--- /dev/null
+++ b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
+        "https://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!-- The mapper namespace and statement element tags were lost when this diff was
+     extracted; only the SQL bodies below survived. -->
+ INSERT INTO task_run (job_run_id, task_id, status, started_at, created_at)
+ VALUES (#{jobRunId}, #{taskId}, #{status}, #{startedAt}, #{createdAt})
+
+
+
+ UPDATE task_run
+ SET status = #{status},
+ finished_at = #{finishedAt},
+ result_message = #{resultMessage}
+ WHERE id = #{id}
+
+
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml
new file mode 100644
index 00000000..dacade96
--- /dev/null
+++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
+        "https://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!-- The mapper namespace and select element tags (including any dynamic <if> blocks
+     after WHERE 1=1) were lost when this diff was extracted; only the SQL survived. -->
+ SELECT
+ w.id,
+ w.name,
+ w.description,
+ w.is_enabled as isEnabled,
+ u.name as createdBy,
+ w.created_at as createdAt
+ FROM workflow w
+ LEFT JOIN user u ON w.created_by = u.id
+ WHERE 1=1
+ ORDER BY w.created_at DESC
+ LIMIT #{pageSize} OFFSET #{offset}
+
+
+
+ SELECT COUNT(*)
+ FROM workflow w
+ WHERE 1=1
+
+
+
+ SELECT
+ w.id,
+ w.name,
+ w.description,
+ w.is_enabled as isEnabled,
+ u.name as createdBy,
+ w.created_at as createdAt
+ FROM workflow w
+ LEFT JOIN user u ON w.created_by = u.id
+ WHERE w.id = #{id}
+
+
\ No newline at end of file
diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml
new file mode 100644
index 00000000..224abd02
--- /dev/null
+++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
+        "https://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!-- The mapper namespace, resultMap definitions, and statement element tags were lost
+     when this diff was extracted; only the SQL bodies below survived. -->
+ INSERT INTO workflow_run (workflow_id, trace_id, status, started_at, created_at)
+ VALUES (#{workflowId}, #{traceId}, #{status}, #{startedAt}, #{createdAt})
+
+
+
+ UPDATE workflow_run
+ SET status = #{status},
+ finished_at = #{finishedAt}
+ WHERE id = #{id}
+
+
+
\ No newline at end of file
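
Taken together, the new *_run mappers imply the execution-tracking order sketched below. The numbered steps restate the INSERT/UPDATE statements above; the class itself and any mapper method names are illustrative only:

```java
import java.time.Instant;
import java.util.UUID;

// Illustrative only: shows the insert/update order implied by the new *_run tables.
public class RunLifecycleSketch {
    public static void main(String[] args) {
        String traceId = UUID.randomUUID().toString(); // unique per workflow_run (uk_workflow_run_trace)
        Instant startedAt = Instant.now();

        // 1. INSERT INTO workflow_run (workflow_id, trace_id, status, started_at, created_at)
        // 2. For each job, in workflow_job.execution_order:
        //      INSERT INTO job_run (workflow_run_id, job_id, status, started_at, created_at)
        // 3. For each task, in job_task.execution_order:
        //      INSERT INTO task_run (job_run_id, task_id, status, started_at, created_at)
        //      ... run the task ...
        //      UPDATE task_run SET status, finished_at, result_message
        // 4. UPDATE job_run SET status, finished_at
        // 5. UPDATE workflow_run SET status, finished_at
        System.out.println("trace_id=" + traceId + " started_at=" + startedAt);
    }
}
```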
diff --git a/apps/user-service/src/main/resources/sql/01-schema-h2.sql b/apps/user-service/src/main/resources/sql/01-schema-h2.sql
new file mode 100644
index 00000000..018ebb1d
--- /dev/null
+++ b/apps/user-service/src/main/resources/sql/01-schema-h2.sql
@@ -0,0 +1,328 @@
+-- H2-compatible schema (for tests)
+-- MySQL-specific unsigned, AFTER clauses, etc. replaced with H2-compatible forms
+
+CREATE TABLE `permission` (
+ `id` int NOT NULL AUTO_INCREMENT,
+ `resource` varchar(100) NULL,
+ `description` varchar(255) NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ `is_active` boolean DEFAULT TRUE,
+ `updated_by` bigint NULL,
+ `created_by` bigint NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `organization` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `name` varchar(150) NULL,
+ `domain_name` varchar(100) NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `role` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `organization_id` bigint NULL,
+ `name` varchar(100) NULL,
+ `description` varchar(500) NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `user` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `name` varchar(50) NULL,
+ `email` varchar(100) NULL,
+ `password` varchar(255) NULL,
+ `status` varchar(20) NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `department` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `organization_id` bigint NOT NULL,
+ `name` varchar(100) NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `position` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `organization_id` bigint NOT NULL,
+ `title` varchar(100) NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `user_organization` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `user_id` bigint NOT NULL,
+ `organization_id` bigint NOT NULL,
+ `position_id` bigint NOT NULL,
+ `department_id` bigint NOT NULL,
+ `employee_number` varchar(50) NULL,
+ `status` varchar(20) NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `role_permission` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `role_id` bigint NOT NULL,
+ `permission_id` int NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uk_role_permission` (`role_id`, `permission_id`)
+);
+
+CREATE TABLE `user_role` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `role_id` bigint NOT NULL,
+ `user_organization_id` bigint NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uk_user_role` (`role_id`, `user_organization_id`)
+);
+
+-- Indexes for performance
+CREATE INDEX `idx_user_email` ON `user` (`email`);
+CREATE INDEX `idx_user_status` ON `user` (`status`);
+CREATE INDEX `idx_user_organization_user` ON `user_organization` (`user_id`);
+CREATE INDEX `idx_user_organization_org` ON `user_organization` (`organization_id`);
+CREATE INDEX `idx_user_organization_status` ON `user_organization` (`status`);
+CREATE INDEX `idx_role_org` ON `role` (`organization_id`);
+CREATE INDEX `idx_permission_resource` ON `permission` (`resource`);
+CREATE INDEX `idx_permission_active` ON `permission` (`is_active`);
+
+CREATE TABLE `workflow` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `name` varchar(100) NOT NULL UNIQUE,
+ `description` text NULL,
+ `is_enabled` boolean DEFAULT TRUE,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `created_by` bigint NULL,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ `updated_by` bigint NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `schedule` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `workflow_id` bigint NOT NULL,
+ `cron_expression` varchar(50) NULL,
+ `parameters` json NULL,
+ `is_active` boolean DEFAULT TRUE,
+ `last_run_status` varchar(20) NULL,
+ `last_run_at` timestamp NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `created_by` bigint NULL,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ `updated_by` bigint NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `job` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `name` varchar(100) NOT NULL UNIQUE,
+ `description` text NULL,
+ `is_enabled` boolean DEFAULT TRUE,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `created_by` bigint NULL,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ `updated_by` bigint NULL,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `task` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `name` varchar(100) NOT NULL UNIQUE,
+ `type` varchar(50) NULL,
+ `parameters` json NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `workflow_job` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `workflow_id` bigint NOT NULL,
+ `job_id` bigint NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uk_workflow_job` (`workflow_id`, `job_id`)
+);
+
+CREATE TABLE `job_task` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `job_id` bigint NOT NULL,
+ `task_id` bigint NOT NULL,
+ `execution_order` int NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uk_job_task` (`job_id`, `task_id`)
+);
+
+CREATE TABLE `execution_log` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `execution_type` varchar(20) NULL COMMENT 'task, schedule, job, workflow',
+ `source_id` bigint NULL COMMENT 'ID of the source row, e.g. job_id, schedule_id, task_id, ...',
+ `log_level` varchar(20) NULL,
+ `executed_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `log_message` text NULL,
+ `trace_id` char(36) NULL,
+ `config_snapshot` json NULL,
+ PRIMARY KEY (`id`),
+ INDEX `idx_source_id_type` (`source_id`, `execution_type`)
+);
+
+CREATE TABLE `task_io_data` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `task_run_id` bigint NOT NULL,
+ `io_type` varchar(10) NOT NULL COMMENT 'INPUT, OUTPUT',
+ `name` varchar(100) NOT NULL COMMENT 'parameter/variable name',
+ `data_type` varchar(50) NOT NULL COMMENT 'string, number, json, file, etc',
+ `data_value` json NULL COMMENT 'actual data value',
+ `data_size` bigint NULL COMMENT 'data size (bytes)',
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`),
+ INDEX `idx_task_io_task_run_id` (`task_run_id`),
+ INDEX `idx_task_io_type` (`io_type`),
+ INDEX `idx_task_io_name` (`name`)
+);
+
+CREATE TABLE `config` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `target_type` varchar(50) NULL COMMENT 'user, job, workflow',
+ `target_id` bigint NULL,
+ `version` int NULL,
+ `json` json NULL,
+ `is_active` boolean DEFAULT TRUE,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `created_by` bigint NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uk_config_target` (`target_type`, `target_id`)
+);
+
+CREATE TABLE `category` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `name` varchar(100) NULL,
+ `description` text NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`)
+);
+
+CREATE TABLE `user_config` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `user_id` bigint NOT NULL,
+ `type` varchar(50) NULL,
+ `name` varchar(100) NULL,
+ `json` json NULL,
+ `is_active` boolean DEFAULT TRUE,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`)
+);
+
+-- Additional indexes (performance)
+CREATE INDEX `idx_schedule_workflow` ON `schedule` (`workflow_id`);
+CREATE INDEX `idx_job_enabled` ON `job` (`is_enabled`);
+CREATE INDEX `idx_task_type` ON `task` (`type`);
+CREATE INDEX `idx_workflow_enabled` ON `workflow` (`is_enabled`);
+CREATE UNIQUE INDEX `uk_schedule_workflow` ON `schedule` (`workflow_id`);
+CREATE UNIQUE INDEX `uk_job_name` ON `job` (`name`);
+CREATE UNIQUE INDEX `uk_task_name` ON `task` (`name`);
+CREATE UNIQUE INDEX `uk_workflow_name` ON `workflow` (`name`);
+CREATE INDEX `idx_user_config_user` ON `user_config` (`user_id`);
+
+-- Workflow run table
+CREATE TABLE `workflow_run` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `workflow_id` bigint NOT NULL,
+ `trace_id` char(36) NOT NULL,
+ `run_number` varchar(20) NULL,
+ `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled',
+ `trigger_type` varchar(20) NULL COMMENT 'manual, schedule, push, pull_request',
+ `started_at` timestamp NULL,
+ `finished_at` timestamp NULL,
+ `created_by` bigint NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uk_workflow_run_trace` (`trace_id`),
+ INDEX `idx_workflow_run_status` (`status`),
+ INDEX `idx_workflow_run_workflow_id` (`workflow_id`),
+ INDEX `idx_workflow_run_created_at` (`created_at`)
+);
+
+-- Job run table
+CREATE TABLE `job_run` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `workflow_run_id` bigint NOT NULL,
+ `job_id` bigint NOT NULL,
+ `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped',
+ `started_at` timestamp NULL,
+ `finished_at` timestamp NULL,
+ `execution_order` int NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`),
+ INDEX `idx_job_run_workflow_run_id` (`workflow_run_id`),
+ INDEX `idx_job_run_status` (`status`),
+ INDEX `idx_job_run_job_id` (`job_id`)
+);
+
+-- Task run table
+CREATE TABLE `task_run` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `job_run_id` bigint NOT NULL,
+ `task_id` bigint NOT NULL,
+ `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped',
+ `started_at` timestamp NULL,
+ `finished_at` timestamp NULL,
+ `execution_order` int NULL,
+ `created_at` timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (`id`),
+ INDEX `idx_task_run_job_run_id` (`job_run_id`),
+ INDEX `idx_task_run_status` (`status`),
+ INDEX `idx_task_run_task_id` (`task_id`)
+);
+
+-- v0.0.3 - H2-compatible version
+DROP TABLE IF EXISTS `config`;
+
+-- H2 requires one column per ALTER statement
+ALTER TABLE `workflow_job` ADD COLUMN `execution_order` INT NULL;
+
+ALTER TABLE `schedule` ADD COLUMN `schedule_text` varchar(20) NULL;
+
+ALTER TABLE `workflow` ADD COLUMN `default_config` json NULL;
+
+ALTER TABLE `user` ADD COLUMN `joined_at` timestamp NULL;
+
+ALTER TABLE `department` ADD COLUMN `description` varchar(100) NULL;
+
+-- v0.4 - H2-compatible version (AFTER clauses removed, unsigned removed, split into individual ALTER statements)
+-- Add columns to execution_log (H2-compatible)
+ALTER TABLE `execution_log` ADD COLUMN `run_id` bigint NULL;
+ALTER TABLE `execution_log` ADD COLUMN `status` varchar(20) NULL;
+ALTER TABLE `execution_log` ADD COLUMN `duration_ms` int NULL;
+ALTER TABLE `execution_log` ADD COLUMN `error_code` varchar(50) NULL;
+ALTER TABLE `execution_log` ADD COLUMN `reserved1` varchar(100) NULL;
+ALTER TABLE `execution_log` ADD COLUMN `reserved2` varchar(100) NULL;
+ALTER TABLE `execution_log` ADD COLUMN `reserved3` int NULL;
+ALTER TABLE `execution_log` ADD COLUMN `reserved4` json NULL;
+ALTER TABLE `execution_log` ADD COLUMN `reserved5` timestamp NULL;
+
+-- Modify existing columns (H2-compatible)
+ALTER TABLE `execution_log` ALTER COLUMN `log_message` varchar(500) NOT NULL;
+ALTER TABLE `execution_log` ALTER COLUMN `executed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP;
+
+-- Drop the obsolete column
+ALTER TABLE `execution_log` DROP COLUMN IF EXISTS `config_snapshot`;
+
+-- Add new indexes
+CREATE INDEX `idx_run_id` ON `execution_log` (`run_id`);
+CREATE INDEX `idx_log_level_status` ON `execution_log` (`log_level`, `status`);
+CREATE INDEX `idx_error_code` ON `execution_log` (`error_code`);
+CREATE INDEX `idx_duration` ON `execution_log` (`duration_ms`);
+
+-- Rework the existing index
+DROP INDEX IF EXISTS `idx_source_id_type`;
+CREATE INDEX `idx_execution_type_source` ON `execution_log` (`execution_type`, `source_id`);
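
A minimal sketch of pointing a test at this schema, assuming standard Spring Boot properties and H2's MySQL compatibility mode; none of these property values come from the project itself:

```java
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

// MODE=MySQL narrows the dialect gap the comments above describe (backticks, AUTO_INCREMENT, etc.).
@SpringBootTest(properties = {
    "spring.datasource.url=jdbc:h2:mem:icebang;MODE=MySQL;DB_CLOSE_DELAY=-1",
    "spring.sql.init.mode=always",
    "spring.sql.init.schema-locations=classpath:sql/01-schema-h2.sql"
})
class SchemaSmokeTest {
    @Test
    void contextLoads() {} // fails at startup if the schema does not parse under H2
}
```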
diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql
index 569c452a..31242c33 100644
--- a/apps/user-service/src/main/resources/sql/01-schema.sql
+++ b/apps/user-service/src/main/resources/sql/01-schema.sql
@@ -284,4 +284,52 @@ CREATE TABLE `task_run` (
INDEX `idx_task_run_job_run_id` (`job_run_id`),
INDEX `idx_task_run_status` (`status`),
INDEX `idx_task_run_task_id` (`task_id`)
- );
\ No newline at end of file
+ );
+
+-- v0.0.3
+DROP TABLE IF EXISTS `config`;
+
+ALTER TABLE `workflow_job`
+ ADD COLUMN `execution_order` INT NULL AFTER `job_id`;
+
+
+ALTER TABLE `schedule`
+ ADD COLUMN `schedule_text` varchar(20) NULL;
+
+ALTER TABLE `workflow`
+ ADD COLUMN `default_config` json NULL;
+
+
+ALTER TABLE `user`
+ ADD COLUMN `joined_at` timestamp NULL;
+
+ALTER TABLE `department`
+ ADD COLUMN `description` varchar(100) NULL;
+
+-- v0.4
+-- Modify the existing execution_log table
+-- Add columns (one at a time)
+ALTER TABLE execution_log ADD COLUMN run_id BIGINT NULL;
+ALTER TABLE execution_log ADD COLUMN status VARCHAR(20) NULL;
+ALTER TABLE execution_log ADD COLUMN duration_ms INT NULL;
+ALTER TABLE execution_log ADD COLUMN error_code VARCHAR(50) NULL;
+ALTER TABLE execution_log ADD COLUMN reserved1 VARCHAR(100) NULL;
+ALTER TABLE execution_log ADD COLUMN reserved2 VARCHAR(100) NULL;
+ALTER TABLE execution_log ADD COLUMN reserved3 INT NULL;
+ALTER TABLE execution_log ADD COLUMN reserved4 json NULL;
+ALTER TABLE execution_log ADD COLUMN reserved5 TIMESTAMP NULL;
+
+-- Modify columns
+ALTER TABLE execution_log MODIFY COLUMN log_message VARCHAR(500) NOT NULL;
+ALTER TABLE execution_log MODIFY COLUMN executed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP;
+
+-- Drop column
+ALTER TABLE execution_log DROP COLUMN config_snapshot;
+
+-- Create indexes (separate CREATE INDEX statements)
+CREATE INDEX idx_run_id ON execution_log(run_id);
+CREATE INDEX idx_log_level_status ON execution_log(log_level, status);
+CREATE INDEX idx_error_code ON execution_log(error_code);
+CREATE INDEX idx_duration ON execution_log(duration_ms);
+CREATE INDEX idx_execution_type_source ON execution_log(execution_type, source_id);
diff --git a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql
new file mode 100644
index 00000000..dd2ddb15
--- /dev/null
+++ b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql
@@ -0,0 +1,120 @@
+-- Seed data for workflows
+
+-- Insert categories
+INSERT INTO `category` (`name`, `description`) VALUES
+ ('마케팅', '마케팅 관련 자동화 워크플로우'),
+ ('콘텐츠', '콘텐츠 생성 및 관리'),
+ ('데이터 수집', '웹 크롤링 및 데이터 수집 관련');
+
+-- Create the workflow
+INSERT INTO `workflow` (`name`, `description`, `is_enabled`, `created_by`) VALUES
+ ('트렌드_블로그_자동화', '트렌드 검색부터 블로그 글 작성까지 전체 자동화 프로세스', TRUE, 1);
+
+-- Create jobs
+INSERT INTO `job` (`name`, `description`, `is_enabled`, `created_by`) VALUES
+ ('트렌드_검색_작업', '최신 트렌드 키워드 검색 및 분석', TRUE, 1),
+ ('싸다구_크롤링_작업', '싸다구 사이트에서 관련 상품 정보 크롤링', TRUE, 1),
+ ('블로그_글_작성_작업', '수집된 데이터를 바탕으로 블로그 글 자동 생성', TRUE, 1);
+
+-- Create tasks
+INSERT INTO `task` (`name`, `type`, `parameters`) VALUES
+-- Trend-search tasks
+('구글_트렌드_검색', 'API_CALL', JSON_OBJECT(
+ 'api_endpoint', 'https://trends.googleapis.com/trends/api',
+ 'search_region', 'KR',
+ 'timeframe', 'now 7-d',
+ 'category', '0'
+ )),
+('네이버_트렌드_검색', 'API_CALL', JSON_OBJECT(
+ 'api_endpoint', 'https://datalab.naver.com/keyword/trendSearch.naver',
+ 'period', 'week',
+ 'device', 'pc'
+ )),
+('키워드_분석_및_필터링', 'DATA_PROCESSING', JSON_OBJECT(
+ 'min_score', 50,
+ 'max_keywords', 10,
+ 'filter_rules', JSON_ARRAY('adult_content', 'spam_keywords')
+ )),
+
+-- Ssadagu crawling tasks
+('싸다구_상품_검색', 'WEB_SCRAPING', JSON_OBJECT(
+ 'base_url', 'https://www.ssg.com',
+ 'search_path', '/search.ssg',
+ 'max_pages', 3,
+ 'delay_ms', 2000
+ )),
+('상품_정보_추출', 'DATA_EXTRACTION', JSON_OBJECT(
+ 'extract_fields', JSON_ARRAY('title', 'price', 'rating', 'review_count', 'image_url'),
+ 'data_validation', true
+ )),
+('가격_비교_분석', 'DATA_ANALYSIS', JSON_OBJECT(
+ 'comparison_sites', JSON_ARRAY('쿠팡', '11번가', '옥션'),
+ 'price_threshold', 0.1
+ )),
+
+-- Blog-writing tasks
+('블로그_템플릿_선택', 'TEMPLATE_PROCESSING', JSON_OBJECT(
+ 'template_type', 'product_review',
+ 'style', 'conversational',
+ 'target_length', 1500
+ )),
+('AI_콘텐츠_생성', 'AI_GENERATION', JSON_OBJECT(
+ 'model', 'gpt-4',
+ 'temperature', 0.7,
+ 'max_tokens', 2000,
+ 'prompt_template', '트렌드 키워드와 상품 정보를 바탕으로 자연스러운 블로그 글을 작성해주세요.'
+ )),
+('콘텐츠_검수_및_최적화', 'CONTENT_REVIEW', JSON_OBJECT(
+ 'seo_optimization', true,
+ 'readability_check', true,
+ 'plagiarism_check', true
+ )),
+('블로그_플랫폼_발행', 'PUBLISHING', JSON_OBJECT(
+ 'platforms', JSON_ARRAY('네이버 블로그', '티스토리', '브런치'),
+ 'schedule_publish', false,
+ 'auto_tags', true
+ ));
+
+-- Link the workflow to its jobs
+INSERT INTO `workflow_job` (`workflow_id`, `job_id`) VALUES
+ (1, 1), -- 트렌드_블로그_자동화 + 트렌드_검색_작업
+ (1, 2), -- 트렌드_블로그_자동화 + 싸다구_크롤링_작업
+ (1, 3); -- 트렌드_블로그_자동화 + 블로그_글_작성_작업
+
+-- Link jobs to tasks (with execution order)
+-- Tasks for the trend-search job
+INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES
+ (1, 1, 1), -- 구글_트렌드_검색
+ (1, 2, 2), -- 네이버_트렌드_검색
+ (1, 3, 3); -- 키워드_분석_및_필터링
+
+-- Tasks for the Ssadagu crawling job
+INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES
+ (2, 4, 1), -- 싸다구_상품_검색
+ (2, 5, 2), -- 상품_정보_추출
+ (2, 6, 3); -- 가격_비교_분석
+
+-- Tasks for the blog-writing job
+INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES
+ (3, 7, 1), -- 블로그_템플릿_선택
+ (3, 8, 2), -- AI_콘텐츠_생성
+ (3, 9, 3), -- 콘텐츠_검수_및_최적화
+ (3, 10, 4); -- 블로그_플랫폼_발행
+
+-- Schedule settings (runs daily at 08:00)
+INSERT INTO `schedule` (`workflow_id`, `cron_expression`, `parameters`, `is_active`, `created_by`) VALUES
+ (1, '0 0 8 * * *', JSON_OBJECT(
+ 'timezone', 'Asia/Seoul',
+ 'retry_count', 3,
+ 'timeout_minutes', 60,
+ 'notification_email', 'admin@icebang.site'
+ ), TRUE, 1);
+
+-- Per-user settings (admin)
+INSERT INTO `user_config` (`user_id`, `type`, `name`, `json`, `is_active`) VALUES
+ (1, 'workflow_preference', '트렌드_블로그_설정', JSON_OBJECT(
+ 'preferred_keywords', JSON_ARRAY('테크', 'IT', '트렌드', '리뷰'),
+ 'blog_style', 'casual',
+ 'auto_publish', false,
+ 'notification_enabled', true
+ ), TRUE);
\ No newline at end of file
diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java
new file mode 100644
index 00000000..03c5f899
--- /dev/null
+++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java
@@ -0,0 +1,159 @@
+package site.icebang.e2e.scenario;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.springframework.http.*;
+import org.springframework.test.context.jdbc.Sql;
+
+import site.icebang.e2e.setup.annotation.E2eTest;
+import site.icebang.e2e.setup.support.E2eTestSupport;
+
+@Sql(
+ value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"},
+ executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS)
+@DisplayName("사용자 로그아웃 플로우 E2E 테스트")
+@E2eTest
+class UserLogoutFlowE2eTest extends E2eTestSupport {
+
+ @SuppressWarnings("unchecked")
+ @Test
+ @DisplayName("정상 로그아웃 전체 플로우 - TDD REd 단계")
+ void completeUserRegistrationFlow_shouldFailBecauseApiNotImplemented() throws Exception {
+ logStep(1, "관리자 로그인 (최우선)");
+
+ // 1. Establish an authenticated session by logging in as the admin
+ Map<String, Object> loginRequest = new HashMap<>();
+ loginRequest.put("email", "admin@icebang.site");
+ loginRequest.put("password", "qwer1234!A");
+
+ HttpHeaders loginHeaders = new HttpHeaders();
+ loginHeaders.setContentType(MediaType.APPLICATION_JSON);
+ loginHeaders.set("Origin", "https://admin.icebang.site");
+ loginHeaders.set("Referer", "https://admin.icebang.site/");
+
+ HttpEntity<Map<String, Object>> loginEntity = new HttpEntity<>(loginRequest, loginHeaders);
+
+ ResponseEntity<Map> loginResponse =
+ restTemplate.postForEntity(getV0ApiUrl("/auth/login"), loginEntity, Map.class);
+
+ assertThat(loginResponse.getStatusCode()).isEqualTo(HttpStatus.OK);
+ assertThat((Boolean) loginResponse.getBody().get("success")).isTrue();
+
+ logSuccess("관리자 로그인 성공 - 인증 상태 확립 완료");
+
+ logStep(2, "로그인 상태에서 보호된 리소스 접근 확인");
+
+ // Extract the session cookie from the login response
+ String sessionCookie = null;
+ java.util.List<String> cookies = loginResponse.getHeaders().get("Set-Cookie");
+ if (cookies != null) {
+ for (String cookie : cookies) {
+ if (cookie.startsWith("JSESSIONID")) {
+ sessionCookie = cookie.split(";")[0]; // keep only the JSESSIONID=... pair
+ break;
+ }
+ }
+ }
+
+ // 2. While logged in, confirm the authenticated state by fetching the caller's own profile
+ // /v0/users/me is a typical API accessible only to authenticated users
+ HttpHeaders authenticatedHeaders = new HttpHeaders();
+ if (sessionCookie != null) {
+ authenticatedHeaders.set("Cookie", sessionCookie);
+ }
+
+ HttpEntity<Void> authenticatedEntity = new HttpEntity<>(authenticatedHeaders);
+ ResponseEntity<Map> beforeLogoutResponse =
+ restTemplate.exchange(
+ getV0ApiUrl("/users/me"), HttpMethod.GET, authenticatedEntity, Map.class);
+
+ assertThat(beforeLogoutResponse.getStatusCode()).isEqualTo(HttpStatus.OK);
+ assertThat((Boolean) beforeLogoutResponse.getBody().get("success")).isTrue();
+ assertThat(beforeLogoutResponse.getBody().get("data")).isNotNull();
+
+ logSuccess("인증된 상태에서 본인 프로필 조회 성공");
+
+ // 3. Call the logout API
+ HttpHeaders logoutHeaders = new HttpHeaders();
+ logoutHeaders.setContentType(MediaType.APPLICATION_JSON);
+ logoutHeaders.set("Origin", "https://admin.icebang.site");
+ logoutHeaders.set("Referer", "https://admin.icebang.site/");
+
+ // Include the session cookie on the logout request as well
+ if (sessionCookie != null) {
+ logoutHeaders.set("Cookie", sessionCookie);
+ }
+
+ HttpEntity<Map<String, Object>> logoutEntity = new HttpEntity<>(new HashMap<>(), logoutHeaders);
+
+ try {
+ ResponseEntity<Map> logoutResponse =
+ restTemplate.postForEntity(getV0ApiUrl("/auth/logout"), logoutEntity, Map.class);
+ logStep(4, "로그아웃 응답 검증 (API구현 돼있으면)");
+
+ logSuccess("로그아웃 API 호출 성공");
+
+ logStep(5, "로그아웃 후 인증 무효화 확인");
+
+ // 5. After logout, the same profile API must reject the stale session
+ HttpEntity<Void> afterLogoutEntity = new HttpEntity<>(authenticatedHeaders);
+ ResponseEntity<Map> afterLogoutResponse =
+ restTemplate.exchange(
+ getV0ApiUrl("/users/me"), HttpMethod.GET, afterLogoutEntity, Map.class);
+
+ // Core assertion: after logout the request must fail authentication with 401 or 403
+ assertThat(afterLogoutResponse.getStatusCode())
+ .withFailMessage(
+ "로그아웃 후 프로필 접근이 차단되어야 합니다. 현재 상태코드: %s", afterLogoutResponse.getStatusCode())
+ .isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN);
+ logSuccess("로그아웃 후 프로필 접근 차단 확인 - 인증 무효화 성공");
+
+ logCompletion("일반 사용자 로그아웃 플로우");
+
+ } catch (org.springframework.web.client.HttpClientErrorException.NotFound ex) {
+ logError("예상된 실패: 로그아웃 API가 구현되지 않음 (404 Not Found");
+ logError("에러 메시지 : " + ex.getMessage());
+
+ fail(
+ "로그아웃 API (/v0/auth/logout)가 구현되지 않았습니다. "
+ + "다음 단계에서 API를 구현해야 합니다. 에러: "
+ + ex.getMessage());
+ } catch (Exception ex) {
+ logError("예상치 못한 오류 발생: " + ex.getClass().getSimpleName());
+ logError("에러 메시지: " + ex.getMessage());
+
+ // Record any other unexpected errors as well
+ fail("로그아웃 API 호출 중 예상치 못한 오류 발생: " + ex.getMessage());
+ }
+ }
+
+ /** Helper that logs in as a regular user (the content team lead, not the admin). */
+ private void performRegularUserLogin() {
+ Map<String, Object> loginRequest = new HashMap<>();
+ loginRequest.put("email", "viral.jung@icebang.site");
+ loginRequest.put("password", "qwer1234!A"); // TODO: confirm the actual password
+
+ HttpHeaders headers = new HttpHeaders();
+ headers.setContentType(MediaType.APPLICATION_JSON);
+ headers.set("Origin", "https://admin.icebang.site");
+ headers.set("Referer", "https://admin.icebang.site/");
+
+ HttpEntity<Map<String, Object>> entity = new HttpEntity<>(loginRequest, headers);
+
+ ResponseEntity<Map> response =
+ restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class);
+
+ if (response.getStatusCode() != HttpStatus.OK) {
+ logError("일반 사용자 로그인 실패: " + response.getStatusCode());
+ throw new RuntimeException("Regular user login failed for logout test");
+ }
+
+ logSuccess("일반 사용자 로그인 완료 (로그아웃 테스트용)");
+ }
+}
diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java
index a873d2d5..df66a7c6 100644
--- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java
+++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java
@@ -7,7 +7,6 @@
import java.util.HashMap;
import java.util.Map;
-import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.http.*;
@@ -16,7 +15,7 @@
import site.icebang.e2e.setup.support.E2eTestSupport;
@Sql(
- value = "classpath:sql/01-insert-internal-users.sql",
+ value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"},
executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS)
@DisplayName("사용자 등록 플로우 E2E 테스트")
class UserRegistrationFlowE2eTest extends E2eTestSupport {
@@ -116,52 +115,6 @@ void completeUserRegistrationFlow() throws Exception {
logCompletion("ERP 사용자 등록 플로우");
}
- @Disabled
- @DisplayName("로그인 없이 리소스 접근 시 모든 요청 차단")
- void accessResourcesWithoutLogin_shouldFailForAll() {
- logStep(1, "인증 없이 조직 목록 조회 시도");
-
- // 1. Attempt to list organizations without logging in
- ResponseEntity<Map> orgResponse =
- restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class);
-
- assertThat(orgResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN);
- logSuccess("미인증 조직 조회 차단 확인");
-
- logStep(2, "인증 없이 조직 옵션 조회 시도");
-
- // 2. Attempt to fetch organization options without logging in
- ResponseEntity<Map> optResponse =
- restTemplate.getForEntity(getV0ApiUrl("/organizations/1/options"), Map.class);
-
- assertThat(optResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN);
- logSuccess("미인증 옵션 조회 차단 확인");
-
- logStep(3, "인증 없이 회원가입 시도");
-
- // 3. Attempt to register without logging in
- Map<String, Object> registerRequest = new HashMap<>();
- registerRequest.put("name", "테스트사용자");
- registerRequest.put("email", "test@example.com");
- registerRequest.put("orgId", 1);
- registerRequest.put("deptId", 2);
- registerRequest.put("positionId", 5);
- registerRequest.put("roleIds", Arrays.asList(6));
-
- HttpHeaders headers = new HttpHeaders();
- headers.setContentType(MediaType.APPLICATION_JSON);
-
- HttpEntity<Map<String, Object>> entity = new HttpEntity<>(registerRequest, headers);
-
- ResponseEntity<Map> regResponse =
- restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class);
-
- assertThat(regResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN);
- logSuccess("미인증 회원가입 차단 확인");
-
- logCompletion("ERP 보안 검증");
- }
-
@Test
@DisplayName("잘못된 자격증명으로 로그인 시도 시 실패")
void loginWithInvalidCredentials_shouldFail() {
@@ -200,7 +153,7 @@ void loginWithInvalidCredentials_shouldFail() {
}
@SuppressWarnings("unchecked")
- @Disabled
+ @Test
@DisplayName("중복 이메일로 사용자 등록 시도 시 실패")
void register_withDuplicateEmail_shouldFail() {
// 선행 조건: 관리자 로그인
diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java
index 4976d0b8..dd5e0d1a 100644
--- a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java
+++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java
@@ -5,8 +5,12 @@
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
+import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.MariaDBContainer;
+import org.testcontainers.containers.Network;
+import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper;
+import org.testcontainers.utility.DockerImageName;
@TestConfiguration(proxyBeanMethods = false)
public class E2eTestConfiguration {
@@ -15,6 +19,11 @@ public ObjectMapper objectMapper() {
return new ObjectMapper();
}
+ @Bean
+ public Network testNetwork() {
+ return Network.newNetwork();
+ }
+
@Bean
@ServiceConnection
MariaDBContainer<?> mariadbContainer() {
@@ -24,8 +33,20 @@ MariaDBContainer> mariadbContainer() {
.withPassword("qwer1234");
}
+ @Bean
+ GenericContainer<?> lokiContainer(Network network) {
+ return new GenericContainer<>(DockerImageName.parse("grafana/loki:2.9.0"))
+ .withNetwork(network)
+ .withNetworkAliases("loki")
+ .withExposedPorts(3100)
+ .withCommand("-config.file=/etc/loki/local-config.yaml")
+ .waitingFor(Wait.forHttp("/ready"))
+ .withStartupTimeout(java.time.Duration.ofMinutes(2));
+ }
+
@DynamicPropertySource
- static void configureProperties(DynamicPropertyRegistry registry, MariaDBContainer<?> mariadb) {
+ static void configureProperties(
+ DynamicPropertyRegistry registry, MariaDBContainer<?> mariadb, GenericContainer<?> loki) {
// MariaDB connection settings
registry.add("spring.datasource.url", mariadb::getJdbcUrl);
registry.add("spring.datasource.username", mariadb::getUsername);
@@ -39,5 +60,7 @@ static void configureProperties(DynamicPropertyRegistry registry, MariaDBContain
registry.add("spring.hikari.maximum-pool-size", () -> "10");
registry.add("spring.hikari.minimum-idle", () -> "5");
registry.add("spring.hikari.pool-name", () -> "HikariCP-E2E");
+
+ System.setProperty("loki.port", String.valueOf(loki.getMappedPort(3100)));
}
}
diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java
index 5c538105..4fe3b00d 100644
--- a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java
+++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java
@@ -13,6 +13,7 @@
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.http.*;
+import org.springframework.mock.web.MockHttpSession;
import org.springframework.restdocs.payload.JsonFieldType;
import org.springframework.test.context.jdbc.Sql;
import org.springframework.transaction.annotation.Transactional;
@@ -79,4 +80,62 @@ void login_success() throws Exception {
.description("HTTP 상태"))
.build())));
}
+
+ @Test
+ @DisplayName("사용자 로그아웃 성공")
+ void logout_success() throws Exception {
+ // given - log in first
+ Map<String, Object> loginRequest = new HashMap<>();
+ loginRequest.put("email", "admin@icebang.site");
+ loginRequest.put("password", "qwer1234!A");
+
+ MockHttpSession session = new MockHttpSession();
+
+ // perform the login first
+ mockMvc
+ .perform(
+ post(getApiUrlForDocs("/v0/auth/login"))
+ .contentType(MediaType.APPLICATION_JSON)
+ .session(session)
+ .content(objectMapper.writeValueAsString(loginRequest)))
+ .andExpect(status().isOk());
+
+ // when & then - 로그아웃 수행
+ mockMvc
+ .perform(
+ post(getApiUrlForDocs("/v0/auth/logout"))
+ .contentType(MediaType.APPLICATION_JSON)
+ .session(session)
+ .header("Origin", "https://admin.icebang.site")
+ .header("Referer", "https://admin.icebang.site/"))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.success").value(true))
+ .andExpect(jsonPath("$.status").value("OK"))
+ .andExpect(jsonPath("$.message").value("OK"))
+ .andExpect(jsonPath("$.data").isEmpty())
+ .andDo(
+ document(
+ "auth-logout",
+ preprocessRequest(prettyPrint()),
+ preprocessResponse(prettyPrint()),
+ resource(
+ ResourceSnippetParameters.builder()
+ .tag("Authentication")
+ .summary("사용자 로그아웃")
+ .description("현재 인증된 사용자의 세션을 무효화합니다")
+ .responseFields(
+ fieldWithPath("success")
+ .type(JsonFieldType.BOOLEAN)
+ .description("요청 성공 여부"),
+ fieldWithPath("data")
+ .type(JsonFieldType.NULL)
+ .description("응답 데이터 (로그아웃 성공 시 null)"),
+ fieldWithPath("message")
+ .type(JsonFieldType.STRING)
+ .description("응답 메시지"),
+ fieldWithPath("status")
+ .type(JsonFieldType.STRING)
+ .description("HTTP 상태"))
+ .build())));
+ }
}
diff --git a/docker/local/docker-compose.yml b/docker/local/docker-compose.yml
index c0bf14fd..6e27be91 100644
--- a/docker/local/docker-compose.yml
+++ b/docker/local/docker-compose.yml
@@ -33,5 +33,74 @@ services:
depends_on:
- mariadb
+ loki:
+ image: grafana/loki:2.9.0
+ container_name: loki
+ restart: unless-stopped
+ ports:
+ - "3100:3100"
+ command: -config.file=/etc/loki/local-config.yaml
+ volumes:
+ - loki_data:/loki
+ healthcheck:
+ test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:3100/ready || exit 1"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ networks:
+ - icebang-network
+
+ promtail:
+ image: grafana/promtail:2.9.0
+ container_name: promtail
+ restart: unless-stopped
+ ports:
+ - "9080:9080"
+ volumes:
+ - ./promtail-config.yml:/etc/promtail/config.yml:ro # config file
+ - ./logs:/logs:cached
+ - promtail_positions:/var/lib/promtail # writable volume for the positions file
+ command:
+ - -config.file=/etc/promtail/config.yml
+ - -config.expand-env=true
+ ulimits:
+ nofile:
+ soft: 65535
+ hard: 65535
+ depends_on:
+ - loki
+ healthcheck:
+ test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:9080/ready || exit 1"]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ networks:
+ - icebang-network # promtail must share a network with loki to push to http://loki:3100
+
+ grafana:
+ image: grafana/grafana:10.1.0
+ container_name: grafana
+ restart: unless-stopped
+ environment:
+ - GF_SECURITY_ADMIN_PASSWORD=admin
+ ports:
+ - "3030:3000"
+ volumes:
+ - grafana_data:/var/lib/grafana
+ depends_on:
+ - loki
+ healthcheck:
+ test: ["CMD-SHELL", "curl -f http://localhost:3000/api/health || exit 1"]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ networks:
+ - icebang-network
+
volumes:
- mariadb_data:
\ No newline at end of file
+ mariadb_data:
+ loki_data:
+ grafana_data:
+ promtail_positions: {} # named volume for the Promtail positions file
+
+networks:
+ icebang-network:
+ driver: bridge
\ No newline at end of file
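
Once this stack is up, Loki's standard HTTP API answers on the published port 3100. A small host-side smoke check, assuming Java 11+; the label selector mirrors the promtail labels defined below:

```java
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

// Queries Loki's query_range endpoint from the host (port 3100 is published above).
public class LokiSmokeCheck {
    public static void main(String[] args) throws Exception {
        String selector = URLEncoder.encode("{app=\"user-service\", env=\"develop\"}", StandardCharsets.UTF_8);
        HttpRequest request = HttpRequest.newBuilder(
                URI.create("http://localhost:3100/loki/api/v1/query_range?query=" + selector))
            .GET()
            .build();
        HttpResponse<String> response =
            HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode()); // 200 once Loki is up and the label set exists
    }
}
```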
diff --git a/docker/local/promtail-config.yml b/docker/local/promtail-config.yml
new file mode 100644
index 00000000..687e1251
--- /dev/null
+++ b/docker/local/promtail-config.yml
@@ -0,0 +1,84 @@
+server:
+ http_listen_port: 9080
+ grpc_listen_port: 0
+
+positions:
+ filename: /tmp/positions.yaml
+
+clients:
+ - url: http://loki:3100/loki/api/v1/push # loki service alias on the compose network
+
+scrape_configs:
+ - job_name: user-service-logs
+ static_configs:
+ - targets:
+ - localhost
+ labels:
+ job: user-service
+ app: user-service
+ env: develop
+ __path__: /logs/develop/app.log
+ pipeline_stages:
+ - regex:
+ expression: '^\[(?P<traceId>[^\]]*)\] \[(?P<spanId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P<thread>[^\]]+)\] (?P<level>\w+)\s+(?P<logger>\S+) - (?P<message>.*)$'
+ - labels:
+ traceId:
+ level:
+ thread:
+ logger:
+ spanId:
+
+ - job_name: user-service-errors
+ static_configs:
+ - targets:
+ - localhost
+ labels:
+ job: user-service-errors
+ app: user-service
+ env: develop
+ log_type: error
+ __path__: /logs/develop/error.log
+ pipeline_stages:
+ - regex:
+ expression: '^\[(?P<traceId>[^\]]*)\] \[(?P<spanId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P<thread>[^\]]+)\] (?P<level>\w+)\s+(?P<logger>\S+) - (?P<message>.*)$'
+ - labels:
+ traceId:
+ level:
+ thread:
+ logger:
+ spanId:
+
+ - job_name: pre-processing-logs
+ static_configs:
+ - targets:
+ - localhost
+ labels:
+ job: pre-processing
+ app: pre-processing
+ env: develop
+ __path__: /logs/develop/pre-processing-app.log
+ pipeline_stages:
+ - regex:
+ expression: '^\[(?P<traceId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \| (?P<level>\w+) \| (?P<logger>[^:]+:[^:]+:\d+) \| (?P<message>.*)$'
+ - labels:
+ traceId:
+ level:
+ logger:
+
+ - job_name: pre-processing-errors
+ static_configs:
+ - targets:
+ - localhost
+ labels:
+ job: pre-processing-errors
+ app: pre-processing
+ env: develop
+ log_type: error
+ __path__: /logs/develop/pre-processing-app-error.log
+ pipeline_stages:
+ - regex:
+ expression: '^\[(?P<traceId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \| (?P<level>\w+) \| (?P<logger>[^:]+:[^:]+:\d+) \| (?P<message>.*)$'
+ - labels:
+ traceId:
+ level:
+ logger:
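
A quick way to confirm these expressions stay in sync with the file-layout-pattern is to replay a sample line through them. The sketch below ports the user-service expression to Java's named-group syntax ((?<name>...) instead of RE2's (?P<name>...)); the sample line is fabricated for illustration:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Java uses (?<name>...) where promtail's RE2 uses (?P<name>...); otherwise the pattern is identical.
public class PromtailRegexCheck {
    public static void main(String[] args) {
        Pattern p = Pattern.compile(
            "^\\[(?<traceId>[^\\]]*)\\] \\[(?<spanId>[^\\]]*)\\] "
                + "(?<timestamp>\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}) "
                + "\\[(?<thread>[^\\]]+)\\] (?<level>\\w+)\\s+(?<logger>\\S+) - (?<message>.*)$");
        // Shaped like the file-layout-pattern output; the values are made up.
        String sample = "[0f1e2d3c] [a1b2] 2024-05-01 12:00:00.123 [main] INFO  site.icebang.App - started";
        Matcher m = p.matcher(sample);
        if (m.matches()) {
            System.out.println("level=" + m.group("level") + " traceId=" + m.group("traceId"));
        } else {
            System.out.println("no match - the pattern and the log layout have drifted");
        }
    }
}
```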
diff --git a/docker/production/docker-compose.yml b/docker/production/docker-compose.yml
index 659934c7..deff1ca4 100644
--- a/docker/production/docker-compose.yml
+++ b/docker/production/docker-compose.yml
@@ -19,6 +19,8 @@ services:
image: ghcr.io/kernel180-be12/final-4team-icebang/user-service:latest
container_name: user-service
restart: on-failure:3
+ depends_on:
+ - promtail
ports:
- "8080:8080"
networks:
@@ -27,6 +29,25 @@ services:
- .env.prod
environment:
- SPRING_PROFILES_ACTIVE=production
+ volumes:
+ - logs_volume:/logs
+
+ promtail:
+ image: grafana/promtail:2.9.0
+ container_name: promtail
+ restart: unless-stopped
+ volumes:
+ - ./promtail-config.yml:/etc/promtail/config.yml:ro
+ - logs_volume:/logs # read the Spring logs
+ command:
+ - -config.file=/etc/promtail/config.yml
+ - -config.expand-env=true
+ ulimits:
+ nofile:
+ soft: 65535
+ hard: 65535
+ env_file:
+ - .env.prod
pre-processing-service:
image: ghcr.io/kernel180-be12/final-4team-icebang/pre-processing-service:latest
@@ -38,10 +59,16 @@ services:
- app-network
env_file:
- .env.prod
+ volumes:
+ - onnx_models:/app/models # volume for ONNX model storage
volumes:
caddy_data:
caddy_config:
+ logs_volume:
+ driver: local
+ onnx_models:
+ driver: local
networks:
app-network:
diff --git a/docker/production/promtail-config.yml b/docker/production/promtail-config.yml
new file mode 100644
index 00000000..30beb73a
--- /dev/null
+++ b/docker/production/promtail-config.yml
@@ -0,0 +1,49 @@
+server:
+ http_listen_port: 9080
+ grpc_listen_port: 0
+
+positions:
+ filename: /tmp/positions.yaml
+
+clients:
+ - url: https://${LOKI_USERNAME}:${LOKI_PASSWORD}@${LOKI_HOST}/loki/api/v1/push
+
+scrape_configs:
+ - job_name: user-service-logs
+ static_configs:
+ - targets:
+ - localhost
+ labels:
+ job: user-service
+ app: user-service
+ env: production
+ __path__: /logs/production/app.log
+ pipeline_stages:
+ - regex:
+ expression: '^\[(?P<traceId>[^\]]*)\] \[(?P<spanId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P<thread>[^\]]+)\] (?P<level>\w+)\s+(?P<logger>\S+) - (?P<message>.*)$'
+ - labels:
+ traceId:
+ level:
+ thread:
+ logger:
+ spanId:
+
+ - job_name: user-service-errors
+ static_configs:
+ - targets:
+ - localhost
+ labels:
+ job: user-service-errors
+ app: user-service
+ env: production
+ log_type: error
+ __path__: /logs/production/error.log
+ pipeline_stages:
+ - regex:
+ expression: '^\[(?P<traceId>[^\]]*)\] \[(?P<spanId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P<thread>[^\]]+)\] (?P<level>\w+)\s+(?P<logger>\S+) - (?P<message>.*)$'
+ - labels:
+ traceId:
+ level:
+ thread:
+ logger:
+ spanId:
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 00000000..3557bc5a
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,7 @@
+# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
+package = []
+
+[metadata]
+lock-version = "2.1"
+python-versions = ">=3.11"
+content-hash = "f5666f5625d676c506924a57dc0520a1f3ed2b2c774baed3dc85353594f8473d"