From f3bdd290b342717490a0c0659c52d7afe076fa3a Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 03:42:19 +0900 Subject: [PATCH 01/31] =?UTF-8?q?refactor:=20=EC=84=9C=EB=B9=84=EC=8A=A4?= =?UTF-8?q?=20=EB=A0=88=EC=9D=B4=EC=96=B4=EB=A5=BC=20=ED=95=A8=EC=88=98?= =?UTF-8?q?=EC=97=90=EC=84=9C=20=ED=81=B4=EB=9E=98=EC=8A=A4=20=EB=B0=A9?= =?UTF-8?q?=EC=8B=9D=EC=9C=BC=EB=A1=9C=20=EB=B3=80=EA=B2=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 모든 서비스 함수를 클래스 기반 구조로 변환 - product.py 라우터에서 서비스 인스턴스 생성 방식으로 변경 - blog.py와 동일한 패턴으로 일관성 확보 - CustomException 구조에 맞게 에러 처리 개선 - 기존 비즈니스 로직은 그대로 유지 --- .../app/api/endpoints/product.py | 63 ++++- .../app/service/crawl_service.py | 88 +++---- .../app/service/match_service.py | 104 ++++---- .../app/service/search_service.py | 130 +++++----- .../app/service/similarity_service.py | 238 +++++++++--------- 5 files changed, 341 insertions(+), 282 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index 4e8c6682..f8f69dff 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -1,38 +1,76 @@ from fastapi import APIRouter, Request, HTTPException from app.decorators.logging import log_api_call -from ...errors.CustomException import InvalidItemDataException, ItemNotFoundException -from ...service.crawl_service import crawl_product_detail -from ...service.search_service import search_products -from ...service.match_service import match_products -from ...service.similarity_service import select_product_by_similarity +from ...errors.CustomException import InvalidItemDataException, ItemNotFoundException, CustomException +from ...service.crawl_service import CrawlService +from ...service.search_service import SearchService +from ...service.match_service import MatchService +from ...service.similarity_service import SimilarityService from ...model.schemas import * router = APIRouter() + @router.get("/") async def root(): return {"message": "product API"} + @router.post("/search", response_model=ResponseSadaguSearch) async def search(request: RequestSadaguSearch): """ 상품 검색 엔드포인트 """ - return await search_products(request) + try: + search_service = SearchService() + result = await search_service.search_products(request) + + if not result: + raise CustomException(500, "상품 검색에 실패했습니다.", "SEARCH_FAILED") + + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + @router.post("/match", response_model=ResponseSadaguMatch) async def match(request: RequestSadaguMatch): """ 상품 매칭 엔드포인트 """ - return match_products(request) + try: + match_service = MatchService() + result = match_service.match_products(request) + + if not result: + raise CustomException(500, "상품 매칭에 실패했습니다.", "MATCH_FAILED") + + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + @router.post("/similarity", response_model=ResponseSadaguSimilarity) async def similarity(request: RequestSadaguSimilarity): """ 유사도 분석 엔드포인트 """ - return select_product_by_similarity(request) + try: + similarity_service = SimilarityService() + result = similarity_service.select_product_by_similarity(request) + + if not result: + 
raise CustomException(500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED") + + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + @router.post("/crawl", response_model=ResponseSadaguCrawl) async def crawl(request: Request, body: RequestSadaguCrawl): @@ -40,11 +78,16 @@ async def crawl(request: Request, body: RequestSadaguCrawl): 상품 상세 정보 크롤링 엔드포인트 """ try: - result = await crawl_product_detail(body) + crawl_service = CrawlService() + result = await crawl_service.crawl_product_detail(body) + + if not result: + raise CustomException(500, "상품 크롤링에 실패했습니다.", "CRAWL_FAILED") + return result except InvalidItemDataException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except ItemNotFoundException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 11844ead..f4d556ba 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -5,45 +5,49 @@ from app.model.schemas import RequestSadaguCrawl -async def crawl_product_detail(request: RequestSadaguCrawl) -> dict: - """ - 선택된 상품의 상세 정보를 크롤링하는 비즈니스 로직입니다. (5단계) - 상품 URL을 입력받아 상세 정보를 크롤링하여 딕셔너리로 반환합니다. - """ - crawler = DetailCrawler(use_selenium=request.use_selenium) - - try: - print(f"상품 상세 크롤링 시작: {request.product_url}") - - # 상세 정보 크롤링 실행 - product_detail = await crawler.crawl_detail( - product_url=str(request.product_url), - include_images=request.include_images - ) - - if not product_detail: - raise InvalidItemDataException("상품 상세 정보 크롤링 실패") - - print(f"크롤링 완료: {product_detail.get('title', 'Unknown')[:50]}") - - # 응답 데이터 구성 - response_data = { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "tag": request.tag, - "product_url": str(request.product_url), - "use_selenium": request.use_selenium, - "include_images": request.include_images, - "product_detail": product_detail, - "status": "success", - "crawled_at": time.strftime('%Y-%m-%d %H:%M:%S') - } - - return response_data - - except Exception as e: - print(f"크롤링 서비스 오류: {e}") - raise InvalidItemDataException(f"상품 상세 크롤링 오류: {e}") - finally: - await crawler.close() \ No newline at end of file +class CrawlService: + def __init__(self): + pass + + async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: + """ + 선택된 상품의 상세 정보를 크롤링하는 비즈니스 로직입니다. (5단계) + 상품 URL을 입력받아 상세 정보를 크롤링하여 딕셔너리로 반환합니다. 
+ """ + crawler = DetailCrawler(use_selenium=request.use_selenium) + + try: + print(f"상품 상세 크롤링 시작: {request.product_url}") + + # 상세 정보 크롤링 실행 + product_detail = await crawler.crawl_detail( + product_url=str(request.product_url), + include_images=request.include_images + ) + + if not product_detail: + raise InvalidItemDataException("상품 상세 정보 크롤링 실패") + + print(f"크롤링 완료: {product_detail.get('title', 'Unknown')[:50]}") + + # 응답 데이터 구성 + response_data = { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "tag": request.tag, + "product_url": str(request.product_url), + "use_selenium": request.use_selenium, + "include_images": request.include_images, + "product_detail": product_detail, + "status": "success", + "crawled_at": time.strftime('%Y-%m-%d %H:%M:%S') + } + + return response_data + + except Exception as e: + print(f"크롤링 서비스 오류: {e}") + raise InvalidItemDataException(f"상품 상세 크롤링 오류: {e}") + finally: + await crawler.close() \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py index 6b1cc171..e52f821c 100644 --- a/apps/pre-processing-service/app/service/match_service.py +++ b/apps/pre-processing-service/app/service/match_service.py @@ -3,64 +3,68 @@ from ..model.schemas import RequestSadaguMatch -def match_products(request: RequestSadaguMatch) -> dict: - """ - 키워드 매칭 로직 (MeCab 등 사용) - 3단계 - """ - keyword = request.keyword - products = request.search_results +class MatchService: + def __init__(self): + pass - if not products: - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "matched_products": [], - "status": "success" - } + def match_products(self, request: RequestSadaguMatch) -> dict: + """ + 키워드 매칭 로직 (MeCab 등 사용) - 3단계 + """ + keyword = request.keyword + products = request.search_results - try: - matcher = KeywordMatcher() - matched_products = [] + if not products: + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "matched_products": [], + "status": "success" + } - print(f"키워드 '{keyword}'와 {len(products)}개 상품 매칭 분석 시작...") + try: + matcher = KeywordMatcher() + matched_products = [] - for i, product in enumerate(products): - title = product.get('title', '') - if not title: - continue + print(f"키워드 '{keyword}'와 {len(products)}개 상품 매칭 분석 시작...") - # 키워드 매칭 분석 - match_result = matcher.analyze_keyword_match(title, keyword) + for i, product in enumerate(products): + title = product.get('title', '') + if not title: + continue - print(f"상품 {i + 1}: {title[:50]} | {match_result['reason']}") + # 키워드 매칭 분석 + match_result = matcher.analyze_keyword_match(title, keyword) - if match_result['is_match']: - # 매칭된 상품에 매칭 정보 추가 - matched_product = product.copy() - matched_product['match_info'] = { - 'match_type': match_result['match_type'], - 'match_score': match_result['score'], - 'match_reason': match_result['reason'] - } - matched_products.append(matched_product) - print(f" ✅ 매칭됨!") + print(f"상품 {i + 1}: {title[:50]} | {match_result['reason']}") - print(f"매칭 결과: {len(matched_products)}개 상품") + if match_result['is_match']: + # 매칭된 상품에 매칭 정보 추가 + matched_product = product.copy() + matched_product['match_info'] = { + 'match_type': match_result['match_type'], + 'match_score': match_result['score'], + 'match_reason': match_result['reason'] + } + 
matched_products.append(matched_product) + print(f" ✅ 매칭됨!") - # 매칭 스코어 기준으로 정렬 (높은 순) - matched_products.sort(key=lambda x: x['match_info']['match_score'], reverse=True) + print(f"매칭 결과: {len(matched_products)}개 상품") - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "matched_products": matched_products, - "status": "success" - } + # 매칭 스코어 기준으로 정렬 (높은 순) + matched_products.sort(key=lambda x: x['match_info']['match_score'], reverse=True) - except Exception as e: - print(f"매칭 서비스 오류: {e}") - raise InvalidItemDataException(f"키워드 매칭 실패: {str(e)}") \ No newline at end of file + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "matched_products": matched_products, + "status": "success" + } + + except Exception as e: + print(f"매칭 서비스 오류: {e}") + raise InvalidItemDataException(f"키워드 매칭 실패: {str(e)}") \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index da7aa1fd..d676f7dd 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -3,79 +3,83 @@ from ..model.schemas import RequestSadaguSearch -async def search_products(request: RequestSadaguSearch) -> dict: - """ - 키워드 기반으로 상품을 검색하는 비즈니스 로직 (2단계) - """ - keyword = request.keyword - crawler = SearchCrawler(use_selenium=True) +class SearchService: + def __init__(self): + pass - try: - print(f"키워드 '{keyword}'로 상품 검색 시작...") + async def search_products(self, request: RequestSadaguSearch) -> dict: + """ + 키워드 기반으로 상품을 검색하는 비즈니스 로직 (2단계) + """ + keyword = request.keyword + crawler = SearchCrawler(use_selenium=True) - # Selenium 또는 httpx로 상품 검색 - if crawler.use_selenium: - search_results = await crawler.search_products_selenium(keyword) - else: - search_results = await crawler.search_products_httpx(keyword) + try: + print(f"키워드 '{keyword}'로 상품 검색 시작...") - if not search_results: - print("검색 결과가 없습니다.") - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "search_results": [], - "status": "success" - } + # Selenium 또는 httpx로 상품 검색 + if crawler.use_selenium: + search_results = await crawler.search_products_selenium(keyword) + else: + search_results = await crawler.search_products_httpx(keyword) - # 상품별 기본 정보 수집 (제목이 없는 경우 다시 크롤링) - enriched_results = [] - print(f"총 {len(search_results)}개 상품의 기본 정보를 수집 중...") + if not search_results: + print("검색 결과가 없습니다.") + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "search_results": [], + "status": "success" + } - for i, product in enumerate(search_results): - try: - # 이미 제목이 있고 유효한 경우 그대로 사용 - if product.get('title') and product['title'] != 'Unknown Title' and len(product['title'].strip()) > 0: - enriched_results.append(product) - else: - # 제목이 없거나 유효하지 않은 경우 다시 크롤링 - print(f"상품 {i + 1}: 제목 재수집 중... 
({product['url']})") - basic_info = await crawler.get_basic_product_info(product['url']) + # 상품별 기본 정보 수집 (제목이 없는 경우 다시 크롤링) + enriched_results = [] + print(f"총 {len(search_results)}개 상품의 기본 정보를 수집 중...") - if basic_info and basic_info['title'] != "제목 없음": - enriched_results.append({ - 'url': product['url'], - 'title': basic_info['title'] - }) + for i, product in enumerate(search_results): + try: + # 이미 제목이 있고 유효한 경우 그대로 사용 + if product.get('title') and product['title'] != 'Unknown Title' and len(product['title'].strip()) > 0: + enriched_results.append(product) else: - # 그래도 제목을 못 찾으면 제외 - print(f" 제목 추출 실패, 제외") - continue + # 제목이 없거나 유효하지 않은 경우 다시 크롤링 + print(f"상품 {i + 1}: 제목 재수집 중... ({product['url']})") + basic_info = await crawler.get_basic_product_info(product['url']) + + if basic_info and basic_info['title'] != "제목 없음": + enriched_results.append({ + 'url': product['url'], + 'title': basic_info['title'] + }) + else: + # 그래도 제목을 못 찾으면 제외 + print(f" 제목 추출 실패, 제외") + continue - # 최대 20개까지만 처리 - if len(enriched_results) >= 20: - break + # 최대 20개까지만 처리 + if len(enriched_results) >= 20: + break - except Exception as e: - print(f"상품 {i + 1} 처리 중 오류: {e}") - continue + except Exception as e: + print(f"상품 {i + 1} 처리 중 오류: {e}") + continue - print(f"최종 수집된 유효 상품: {len(enriched_results)}개") + print(f"최종 수집된 유효 상품: {len(enriched_results)}개") - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "search_results": enriched_results, - "status": "success" - } + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "search_results": enriched_results, + "status": "success" + } - except Exception as e: - print(f"검색 서비스 오류: {e}") - raise InvalidItemDataException(f"상품 검색 실패: {str(e)}") + except Exception as e: + print(f"검색 서비스 오류: {e}") + raise InvalidItemDataException(f"상품 검색 실패: {str(e)}") - finally: - await crawler.close() \ No newline at end of file + finally: + await crawler.close() \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 27823e9e..3a94800f 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -3,135 +3,139 @@ from ..model.schemas import RequestSadaguSimilarity -def select_product_by_similarity(request: RequestSadaguSimilarity) -> dict: - """ - BERT 기반 유사도 분석 후 상품 선택 - 4단계 - """ - keyword = request.keyword - candidates = request.matched_products - fallback_products = request.search_results or [] - - # 매칭된 상품이 없으면 전체 검색 결과로 폴백 - if not candidates: - if not fallback_products: - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": None, - "reason": "매칭된 상품과 검색 결과가 모두 없음", - "status": "success" - } - - print("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석") - candidates = fallback_products - analysis_mode = "fallback_similarity_only" - else: - analysis_mode = "matched_products" - - try: - analyzer = SimilarityAnalyzer() - - print(f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... 
(모드: {analysis_mode})") - - # 한 개만 있으면 바로 선택 - if len(candidates) == 1: - selected_product = candidates[0] - - # 유사도 계산 - similarity = analyzer.calculate_similarity(keyword, selected_product['title']) +class SimilarityService: + def __init__(self): + pass + + def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict: + """ + BERT 기반 유사도 분석 후 상품 선택 - 4단계 + """ + keyword = request.keyword + candidates = request.matched_products + fallback_products = request.search_results or [] + + # 매칭된 상품이 없으면 전체 검색 결과로 폴백 + if not candidates: + if not fallback_products: + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": None, + "reason": "매칭된 상품과 검색 결과가 모두 없음", + "status": "success" + } + + print("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석") + candidates = fallback_products + analysis_mode = "fallback_similarity_only" + else: + analysis_mode = "matched_products" + + try: + analyzer = SimilarityAnalyzer() + + print(f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... (모드: {analysis_mode})") + + # 한 개만 있으면 바로 선택 + if len(candidates) == 1: + selected_product = candidates[0] + + # 유사도 계산 + similarity = analyzer.calculate_similarity(keyword, selected_product['title']) + + # 폴백 모드에서는 임계값 검증 + if analysis_mode == "fallback_similarity_only": + similarity_threshold = 0.3 + if similarity < similarity_threshold: + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": None, + "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})", + "status": "success" + } + + selected_product['similarity_info'] = { + 'similarity_score': float(similarity), + 'analysis_type': 'single_candidate', + 'analysis_mode': analysis_mode + } + + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": selected_product, + "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})", + "status": "success" + } + + # 여러 개가 있으면 유사도 비교 + print("여러 상품 중 최고 유사도로 선택...") + + # 제목만 추출해서 배치 분석 + titles = [product['title'] for product in candidates] + similarity_results = analyzer.analyze_similarity_batch(keyword, titles) + + # 결과 출력 + for result in similarity_results: + print(f" {result['title'][:40]} | 유사도: {result['similarity']:.4f}") + + # 최고 유사도 선택 + best_result = similarity_results[0] + selected_product = candidates[best_result['index']].copy() # 폴백 모드에서는 임계값 검증 - if analysis_mode == "fallback_similarity_only": - similarity_threshold = 0.3 - if similarity < similarity_threshold: - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": None, - "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})", - "status": "success" - } - + similarity_threshold = 0.3 + if analysis_mode == "fallback_similarity_only" and best_result['similarity'] < similarity_threshold: + return { + "job_id": request.job_id, + "schedule_id": request.schedule_id, + "schedule_his_id": request.schedule_his_id, + "keyword": keyword, + "selected_product": None, + "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})", + "status": "success" + } + + # 유사도 정보 추가 selected_product['similarity_info'] = { - 'similarity_score': float(similarity), - 'analysis_type': 'single_candidate', - 
'analysis_mode': analysis_mode + 'similarity_score': best_result['similarity'], + 'analysis_type': 'multi_candidate_bert', + 'analysis_mode': analysis_mode, + 'rank': 1, + 'total_candidates': len(candidates) } - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": selected_product, - "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})", - "status": "success" - } - - # 여러 개가 있으면 유사도 비교 - print("여러 상품 중 최고 유사도로 선택...") - - # 제목만 추출해서 배치 분석 - titles = [product['title'] for product in candidates] - similarity_results = analyzer.analyze_similarity_batch(keyword, titles) + # 매칭 모드에서는 종합 점수도 계산 + if analysis_mode == "matched_products" and 'match_info' in selected_product: + match_score = selected_product['match_info']['match_score'] + similarity_score = best_result['similarity'] + # 가중치: 매칭 40%, 유사도 60% + final_score = match_score * 0.4 + similarity_score * 0.6 + selected_product['final_score'] = final_score + reason = f"종합점수({final_score:.4f}) = 매칭({match_score:.4f})*0.4 + 유사도({similarity_score:.4f})*0.6" + else: + reason = f"유사도({best_result['similarity']:.4f}) 기준 선택 ({analysis_mode})" - # 결과 출력 - for result in similarity_results: - print(f" {result['title'][:40]} | 유사도: {result['similarity']:.4f}") + print(f"선택됨: {selected_product['title'][:50]} | {reason}") - # 최고 유사도 선택 - best_result = similarity_results[0] - selected_product = candidates[best_result['index']].copy() - - # 폴백 모드에서는 임계값 검증 - similarity_threshold = 0.3 - if analysis_mode == "fallback_similarity_only" and best_result['similarity'] < similarity_threshold: return { "job_id": request.job_id, "schedule_id": request.schedule_id, "schedule_his_id": request.schedule_his_id, "keyword": keyword, - "selected_product": None, - "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})", + "selected_product": selected_product, + "reason": reason, "status": "success" } - # 유사도 정보 추가 - selected_product['similarity_info'] = { - 'similarity_score': best_result['similarity'], - 'analysis_type': 'multi_candidate_bert', - 'analysis_mode': analysis_mode, - 'rank': 1, - 'total_candidates': len(candidates) - } - - # 매칭 모드에서는 종합 점수도 계산 - if analysis_mode == "matched_products" and 'match_info' in selected_product: - match_score = selected_product['match_info']['match_score'] - similarity_score = best_result['similarity'] - # 가중치: 매칭 40%, 유사도 60% - final_score = match_score * 0.4 + similarity_score * 0.6 - selected_product['final_score'] = final_score - reason = f"종합점수({final_score:.4f}) = 매칭({match_score:.4f})*0.4 + 유사도({similarity_score:.4f})*0.6" - else: - reason = f"유사도({best_result['similarity']:.4f}) 기준 선택 ({analysis_mode})" - - print(f"선택됨: {selected_product['title'][:50]} | {reason}") - - return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, - "keyword": keyword, - "selected_product": selected_product, - "reason": reason, - "status": "success" - } - - except Exception as e: - print(f"유사도 분석 서비스 오류: {e}") - raise InvalidItemDataException(f"유사도 분석 실패: {str(e)}") \ No newline at end of file + except Exception as e: + print(f"유사도 분석 서비스 오류: {e}") + raise InvalidItemDataException(f"유사도 분석 실패: {str(e)}") \ No newline at end of file From c5e6b5ba3fba674252244369728c0244e7e69752 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 03:55:55 +0900 Subject: [PATCH 02/31] =?UTF-8?q?refactor:=20schemas=EC=97=90=20title,=20d?= 
=?UTF-8?q?escription=20=EC=B6=94=EA=B0=80=20=EB=B0=8F=20=ED=95=84?= =?UTF-8?q?=EC=9A=94=EC=97=86=EB=8A=94=20=EB=B6=80=EB=B6=84=20=EC=A0=9C?= =?UTF-8?q?=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - title, description 추가 - selenium 사용 여부 및 이미지 포함 여부 삭제(service) - 기존 비즈니스 로직은 그대로 유지 --- .../app/model/schemas.py | 56 +++++++++---------- .../app/service/crawl_service.py | 7 +-- 2 files changed, 28 insertions(+), 35 deletions(-) diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index f206f3e9..9a4893cd 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -30,53 +30,49 @@ class ResponseNaverSearch(ResponseBase): # 2단계: 검색 class RequestSadaguSearch(RequestBase): - keyword: str + keyword: str = Field(..., title="검색 키워드", description="상품을 검색할 키워드") class ResponseSadaguSearch(ResponseBase): - keyword: str - search_results: List[Dict] + keyword: str = Field(..., title="검색 키워드", description="검색에 사용된 키워드") + search_results: List[Dict] = Field(..., title="검색 결과", description="검색된 상품 목록") # 3단계: 매칭 class RequestSadaguMatch(RequestBase): - keyword: str - search_results: List[Dict] + keyword: str = Field(..., title="매칭 키워드", description="상품과 매칭할 키워드") + search_results: List[Dict] = Field(..., title="검색 결과", description="이전 단계에서 검색된 상품 목록") class ResponseSadaguMatch(ResponseBase): - keyword: str - matched_products: List[Dict] + keyword: str = Field(..., title="매칭 키워드", description="매칭에 사용된 키워드") + matched_products: List[Dict] = Field(..., title="매칭된 상품", description="키워드와 매칭된 상품 목록") # 4단계: 유사도 class RequestSadaguSimilarity(RequestBase): - keyword: str - matched_products: List[Dict] - search_results: Optional[List[Dict]] = None # 3단계에서 매칭 실패시 폴백용 + keyword: str = Field(..., title="유사도 분석 키워드", description="유사도 분석할 키워드") + matched_products: List[Dict] = Field(..., title="매칭된 상품", description="이전 단계에서 매칭된 상품 목록") + search_results: Optional[List[Dict]] = Field(None, title="검색 결과", description="매칭 실패시 사용할 전체 검색 결과 (폴백용)") class ResponseSadaguSimilarity(ResponseBase): - keyword: str - selected_product: Optional[Dict] = None - reason: Optional[str] = None + keyword: str = Field(..., title="분석 키워드", description="유사도 분석에 사용된 키워드") + selected_product: Optional[Dict] = Field(None, title="선택된 상품", description="유사도 분석 결과 선택된 상품") + reason: Optional[str] = Field(None, title="선택 이유", description="상품 선택 근거 및 점수 정보") # 사다구몰 크롤링 class RequestSadaguCrawl(BaseModel): - job_id: int = Field(..., description="작업 ID") - schedule_id: int = Field(..., description="스케줄 ID") - schedule_his_id: int = Field(..., description="스케줄 히스토리 ID") - tag: str = Field(..., description="크롤링 태그 (예: 'detail')") - product_url: HttpUrl = Field(..., description="크롤링할 상품의 URL") - use_selenium: bool = Field(default=True, description="Selenium 사용 여부") - include_images: bool = Field(default=False, description="이미지 정보 포함 여부") + job_id: int = Field(..., title="작업 ID", description="현재 실행중인 작업의 고유 식별자") + schedule_id: int = Field(..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자") + schedule_his_id: int = Field(..., title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자") + tag: str = Field(..., title="크롤링 태그", description="크롤링 유형을 구분하는 태그 (예: 'detail')") + product_url: HttpUrl = Field(..., title="상품 URL", description="크롤링할 상품 페이지의 URL") class ResponseSadaguCrawl(BaseModel): - job_id: int - schedule_id: int - schedule_his_id: int - tag: str - product_url: str - use_selenium: bool 
- include_images: bool - product_detail: Optional[Dict] = None - status: str - crawled_at: Optional[str] = None + job_id: int = Field(..., title="작업 ID", description="작업 식별자") + schedule_id: int = Field(..., title="스케줄 ID", description="스케줄 식별자") + schedule_his_id: int = Field(..., title="스케줄 히스토리 ID", description="스케줄 이력 식별자") + tag: str = Field(..., title="크롤링 태그", description="크롤링 유형 태그") + product_url: str = Field(..., title="상품 URL", description="크롤링된 상품 URL") + product_detail: Optional[Dict] = Field(None, title="상품 상세정보", description="크롤링된 상품의 상세 정보") + status: str = Field(..., title="처리 상태", description="크롤링 처리 결과 상태") + crawled_at: Optional[str] = Field(None, title="크롤링 시간", description="크롤링 완료 시간") # 블로그 생성 class RequestBlogCreate(RequestBase): diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index f4d556ba..57be3798 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -1,4 +1,3 @@ -# app/service/crawl_service.py import time from app.utils.crawler_utils import DetailCrawler from app.errors.CustomException import InvalidItemDataException @@ -14,7 +13,7 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: 선택된 상품의 상세 정보를 크롤링하는 비즈니스 로직입니다. (5단계) 상품 URL을 입력받아 상세 정보를 크롤링하여 딕셔너리로 반환합니다. """ - crawler = DetailCrawler(use_selenium=request.use_selenium) + crawler = DetailCrawler(use_selenium=True) try: print(f"상품 상세 크롤링 시작: {request.product_url}") @@ -22,7 +21,7 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: # 상세 정보 크롤링 실행 product_detail = await crawler.crawl_detail( product_url=str(request.product_url), - include_images=request.include_images + include_images=False ) if not product_detail: @@ -37,8 +36,6 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: "schedule_his_id": request.schedule_his_id, "tag": request.tag, "product_url": str(request.product_url), - "use_selenium": request.use_selenium, - "include_images": request.include_images, "product_detail": product_detail, "status": "success", "crawled_at": time.strftime('%Y-%m-%d %H:%M:%S') From 1afc7715504594a6c375290dfbdfa8e817706a86 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 04:07:58 +0900 Subject: [PATCH 03/31] =?UTF-8?q?refactor:=20util=EC=97=90=20=EC=9E=88?= =?UTF-8?q?=EB=8A=94=20print=EC=9D=84=20log=EB=A1=9C=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/utils/crawler_utils.py | 74 +++++++++++---- .../app/utils/keyword_matcher.py | 40 ++++++-- .../app/utils/similarity_analyzer.py | 94 ++++++++++++------- 3 files changed, 151 insertions(+), 57 deletions(-) diff --git a/apps/pre-processing-service/app/utils/crawler_utils.py b/apps/pre-processing-service/app/utils/crawler_utils.py index 8246788a..c952ad09 100644 --- a/apps/pre-processing-service/app/utils/crawler_utils.py +++ b/apps/pre-processing-service/app/utils/crawler_utils.py @@ -8,6 +8,7 @@ from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.common.exceptions import TimeoutException, NoSuchElementException +from loguru import logger class SearchCrawler: @@ -35,9 +36,9 @@ def _setup_selenium(self): try: self.driver = webdriver.Chrome(options=chrome_options) self.wait = WebDriverWait(self.driver, 10) - print("Selenium WebDriver 초기화 완료") + logger.info("Selenium WebDriver 초기화 완료") 
except Exception as e: - print(f"Selenium 초기화 실패, httpx로 대체: {e}") + logger.warning(f"Selenium 초기화 실패, httpx로 대체: {e}") self.use_selenium = False self._setup_httpx() @@ -49,6 +50,7 @@ def _setup_httpx(self): }, timeout=30.0 ) + logger.info("httpx 클라이언트 초기화 완료") async def search_products_selenium(self, keyword: str) -> list[dict]: """Selenium을 사용한 상품 검색""" @@ -56,6 +58,7 @@ async def search_products_selenium(self, keyword: str) -> list[dict]: search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" try: + logger.info(f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'") self.driver.get(search_url) time.sleep(5) @@ -86,11 +89,11 @@ async def search_products_selenium(self, keyword: str) -> list[dict]: seen_urls.add(product['url']) unique_products.append(product) - print(f"Selenium으로 발견한 상품 링크: {len(unique_products)}개") + logger.info(f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)") return unique_products[:20] except Exception as e: - print(f"Selenium 검색 오류: {e}") + logger.error(f"Selenium 검색 오류: keyword='{keyword}', error='{e}'") return [] async def search_products_httpx(self, keyword: str) -> list[dict]: @@ -99,6 +102,7 @@ async def search_products_httpx(self, keyword: str) -> list[dict]: search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" try: + logger.info(f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'") response = await self.client.get(search_url) response.raise_for_status() soup = BeautifulSoup(response.content, 'html.parser') @@ -117,16 +121,18 @@ async def search_products_httpx(self, keyword: str) -> list[dict]: 'title': title }) - print(f"httpx로 발견한 상품 링크: {len(product_links)}개") + logger.info(f"httpx로 발견한 상품 링크: {len(product_links)}개") return product_links[:20] except Exception as e: - print(f"httpx 검색 오류: {e}") + logger.error(f"httpx 검색 오류: keyword='{keyword}', error='{e}'") return [] async def get_basic_product_info(self, product_url: str) -> dict: """기본 상품 정보만 크롤링""" try: + logger.debug(f"기본 상품 정보 크롤링 시작: url='{product_url}'") + if self.use_selenium: self.driver.get(product_url) self.wait.until(lambda driver: driver.execute_script("return document.readyState") == "complete") @@ -139,13 +145,14 @@ async def get_basic_product_info(self, product_url: str) -> dict: title_element = soup.find('h1', {'id': 'kakaotitle'}) title = title_element.get_text(strip=True) if title_element else "제목 없음" + logger.debug(f"기본 상품 정보 크롤링 완료: title='{title[:50]}'") return { 'url': product_url, 'title': title } except Exception as e: - print(f"기본 상품 크롤링 오류 ({product_url}): {e}") + logger.error(f"기본 상품 크롤링 오류: url='{product_url}', error='{e}'") return None async def close(self): @@ -153,13 +160,15 @@ async def close(self): if self.use_selenium and hasattr(self, 'driver'): try: self.driver.quit() - except Exception: - pass + logger.info("Selenium WebDriver 종료 완료") + except Exception as e: + logger.warning(f"Selenium WebDriver 종료 중 오류: {e}") elif hasattr(self, 'client'): try: await self.client.aclose() - except Exception: - pass + logger.info("httpx 클라이언트 종료 완료") + except Exception as e: + logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") class DetailCrawler(SearchCrawler): @@ -168,6 +177,8 @@ class DetailCrawler(SearchCrawler): async def crawl_detail(self, product_url: str, include_images: bool = False) -> dict: """상품 상세 정보 크롤링""" try: + logger.info(f"상품 상세 크롤링 시작: url='{product_url}', include_images={include_images}") + if self.use_selenium: soup = await self._get_soup_selenium(product_url) else: @@ -190,43 +201,55 @@ async 
def crawl_detail(self, product_url: str, include_images: bool = False) -> 'crawled_at': time.strftime('%Y-%m-%d %H:%M:%S') } + logger.info( + f"기본 상품 정보 추출 완료: title='{title[:50]}', price={price}, rating={rating}, options_count={len(options)}") + if include_images: - print("이미지 정보 추출 중...") + logger.info("이미지 정보 추출 중...") product_images = self._extract_images(soup) product_data['product_images'] = [{'original_url': img_url} for img_url in product_images] - print(f"추출된 이미지: {len(product_images)}개") + logger.info(f"추출된 이미지: {len(product_images)}개") else: product_data['product_images'] = [] + logger.info(f"상품 상세 크롤링 완료: url='{product_url}'") return product_data except Exception as e: - print(f"크롤링 오류: {e}") + logger.error(f"상품 상세 크롤링 오류: url='{product_url}', error='{e}'") raise Exception(f"크롤링 실패: {str(e)}") async def _get_soup_selenium(self, product_url: str) -> BeautifulSoup: """Selenium으로 HTML 가져오기""" try: + logger.debug(f"Selenium HTML 로딩 시작: url='{product_url}'") self.driver.get(product_url) self.wait.until(lambda driver: driver.execute_script("return document.readyState") == "complete") time.sleep(2) + logger.debug("Selenium HTML 로딩 완료") return BeautifulSoup(self.driver.page_source, 'html.parser') except Exception as e: + logger.error(f"Selenium HTML 로딩 실패: url='{product_url}', error='{e}'") raise Exception(f"Selenium HTML 로딩 실패: {e}") async def _get_soup_httpx(self, product_url: str) -> BeautifulSoup: """httpx로 HTML 가져오기""" try: + logger.debug(f"httpx HTML 요청 시작: url='{product_url}'") response = await self.client.get(product_url) response.raise_for_status() + logger.debug("httpx HTML 요청 완료") return BeautifulSoup(response.content, 'html.parser') except Exception as e: + logger.error(f"httpx HTML 요청 실패: url='{product_url}', error='{e}'") raise Exception(f"HTTP 요청 실패: {e}") def _extract_title(self, soup: BeautifulSoup) -> str: """제목 추출""" title_element = soup.find('h1', {'id': 'kakaotitle'}) - return title_element.get_text(strip=True) if title_element else "제목 없음" + title = title_element.get_text(strip=True) if title_element else "제목 없음" + logger.debug(f"제목 추출: '{title[:50]}'") + return title def _extract_price(self, soup: BeautifulSoup) -> int: """가격 추출""" @@ -245,7 +268,12 @@ def _extract_price(self, soup: BeautifulSoup) -> int: price_match = re.search(r'(\d+)', price_text) if price_match: price = int(price_match.group(1)) + logger.debug(f"가격 추출 성공: {price}원 (selector: {selector})") break + + if price == 0: + logger.debug("가격 추출 실패 - 0원으로 설정") + return price def _extract_rating(self, soup: BeautifulSoup) -> float: @@ -266,7 +294,13 @@ def _extract_rating(self, soup: BeautifulSoup) -> float: rating += 1 elif 'icon_star_half.svg' in src: rating += 0.5 - break + if rating > 0: + logger.debug(f"평점 추출 성공: {rating}점") + break + + if rating == 0.0: + logger.debug("평점 추출 실패 - 0.0점으로 설정") + return rating def _extract_options(self, soup: BeautifulSoup) -> list[dict]: @@ -276,6 +310,8 @@ def _extract_options(self, soup: BeautifulSoup) -> list[dict]: if sku_list: option_items = sku_list.find_all('li', class_=re.compile(r'imgWrapper')) + logger.debug(f"옵션 항목 발견: {len(option_items)}개") + for item in option_items: title_element = item.find('a', title=True) if title_element: @@ -300,7 +336,9 @@ def _extract_options(self, soup: BeautifulSoup) -> list[dict]: 'stock': stock, 'image_url': image_url }) + logger.debug(f"옵션 추출: name='{option_name}', stock={stock}") + logger.info(f"총 {len(options)}개 옵션 추출 완료") return options def _extract_material_info(self, soup: BeautifulSoup) -> dict: @@ -316,7 +354,9 @@ def 
_extract_material_info(self, soup: BeautifulSoup) -> dict: title = title_element.get_text(strip=True) info = info_element.get_text(strip=True) material_info[title] = info + logger.debug(f"소재 정보 추출: {title}='{info}'") + logger.info(f"총 {len(material_info)}개 소재 정보 추출 완료") return material_info def _extract_images(self, soup: BeautifulSoup) -> list[str]: @@ -336,5 +376,7 @@ def _extract_images(self, soup: BeautifulSoup) -> list[str]: else: continue images.append(src) + logger.debug(f"이미지 URL 추출: {src}") + logger.info(f"총 {len(images)}개 이미지 URL 추출 완료") return images \ No newline at end of file diff --git a/apps/pre-processing-service/app/utils/keyword_matcher.py b/apps/pre-processing-service/app/utils/keyword_matcher.py index 8fab2730..69d87413 100644 --- a/apps/pre-processing-service/app/utils/keyword_matcher.py +++ b/apps/pre-processing-service/app/utils/keyword_matcher.py @@ -1,12 +1,13 @@ from app.core.config import settings # pydantic_settings 기반 +from loguru import logger try: import MeCab - print("MeCab 라이브러리 로딩 성공") + logger.info("MeCab 라이브러리 로딩 성공") MECAB_AVAILABLE = True except ImportError: - print("MeCab 라이브러리를 찾을 수 없습니다. pip install mecab-python3 를 실행해주세요.") + logger.warning("MeCab 라이브러리를 찾을 수 없습니다. pip install mecab-python3 를 실행해주세요.") MeCab = None MECAB_AVAILABLE = False @@ -30,22 +31,25 @@ def __init__(self): test_result = self.mecab.parse("테스트") if test_result and test_result.strip(): self.konlpy_available = True - print(f"MeCab 형태소 분석기 사용 가능 (경로: {settings.mecab_path or '기본'})") + logger.info(f"MeCab 형태소 분석기 사용 가능 (경로: {settings.mecab_path or '기본'})") else: - print("MeCab 테스트 실패") + logger.warning("MeCab 테스트 실패") except Exception as e: - print(f"MeCab 사용 불가 (규칙 기반으로 대체): {e}") + logger.error(f"MeCab 사용 불가 (규칙 기반으로 대체): {e}") else: - print("MeCab 라이브러리가 설치되지 않았습니다. 규칙 기반으로 대체합니다.") + logger.warning("MeCab 라이브러리가 설치되지 않았습니다. 규칙 기반으로 대체합니다.") def analyze_keyword_match(self, title: str, keyword: str) -> dict: """키워드 매칭 분석 결과 반환""" title_lower = title.lower().strip() keyword_lower = keyword.lower().strip() + logger.debug(f"키워드 매칭 분석 시작: title='{title[:50]}', keyword='{keyword}'") + # 1. 완전 포함 검사 exact_match = keyword_lower in title_lower if exact_match: + logger.info(f"완전 포함 매칭 성공: keyword='{keyword}' in title='{title[:50]}'") return { 'is_match': True, 'match_type': 'exact', @@ -57,15 +61,23 @@ def analyze_keyword_match(self, title: str, keyword: str) -> dict: if self.konlpy_available: morphological_result = self._morphological_match(title_lower, keyword_lower) if morphological_result['is_match']: + logger.info(f"형태소 분석 매칭 성공: {morphological_result['reason']}") return morphological_result # 3. 
규칙 기반 분석 (MeCab 실패시) simple_result = self._simple_keyword_match(title_lower, keyword_lower) + if simple_result['is_match']: + logger.info(f"규칙 기반 매칭 성공: {simple_result['reason']}") + else: + logger.debug(f"매칭 실패: {simple_result['reason']}") + return simple_result def _morphological_match(self, title: str, keyword: str) -> dict: """형태소 분석 기반 매칭""" try: + logger.debug(f"형태소 분석 시작: title='{title[:30]}', keyword='{keyword}'") + # 키워드 형태소 분석 keyword_result = self.mecab.parse(keyword) keyword_morphs = [] @@ -90,6 +102,8 @@ def _morphological_match(self, title: str, keyword: str) -> dict: if len(morph) >= 1: title_morphs.append(morph) + logger.debug(f"형태소 추출 완료: keyword_morphs={keyword_morphs}, title_morphs={title_morphs}") + # 형태소 매칭 matched = 0 for kw in keyword_morphs: @@ -97,11 +111,15 @@ def _morphological_match(self, title: str, keyword: str) -> dict: for tw in title_morphs: if kw == tw or kw in tw or tw in kw: matched += 1 + logger.debug(f"형태소 매칭: '{kw}' <-> '{tw}'") break match_ratio = matched / len(keyword_morphs) if keyword_morphs else 0 threshold = 0.4 + logger.debug( + f"형태소 매칭 결과: matched={matched}, total={len(keyword_morphs)}, ratio={match_ratio:.3f}, threshold={threshold}") + if match_ratio >= threshold: return { 'is_match': True, @@ -111,27 +129,35 @@ def _morphological_match(self, title: str, keyword: str) -> dict: } except Exception as e: - print(f"형태소 분석 오류: {e}") + logger.error(f"형태소 분석 오류: keyword='{keyword}', title='{title[:30]}', error='{e}'") return {'is_match': False, 'match_type': 'morphological', 'score': 0.0, 'reason': '형태소 분석 실패'} def _simple_keyword_match(self, title: str, keyword: str) -> dict: """간단한 키워드 매칭""" + logger.debug(f"규칙 기반 매칭 시작: title='{title[:30]}', keyword='{keyword}'") + # 공백으로 분리 title_words = title.split() keyword_words = keyword.split() + logger.debug(f"단어 분리 완료: title_words={title_words}, keyword_words={keyword_words}") + matched = 0 for kw in keyword_words: if len(kw) >= 2: for tw in title_words: if kw in tw or tw in kw: matched += 1 + logger.debug(f"규칙 기반 매칭: '{kw}' <-> '{tw}'") break match_ratio = matched / len(keyword_words) if keyword_words else 0 threshold = 0.3 + logger.debug( + f"규칙 기반 매칭 결과: matched={matched}, total={len(keyword_words)}, ratio={match_ratio:.3f}, threshold={threshold}") + if match_ratio >= threshold: return { 'is_match': True, diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py index d155ee2e..61dd9348 100644 --- a/apps/pre-processing-service/app/utils/similarity_analyzer.py +++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py @@ -2,6 +2,7 @@ import numpy as np from sklearn.metrics.pairwise import cosine_similarity from transformers import AutoTokenizer, AutoModel +from loguru import logger class SimilarityAnalyzer: @@ -9,57 +10,82 @@ class SimilarityAnalyzer: def __init__(self): try: + logger.info("KLUE BERT 모델 로딩 시도 중...") self.tokenizer = AutoTokenizer.from_pretrained('klue/bert-base') self.model = AutoModel.from_pretrained('klue/bert-base') - print("KLUE BERT 모델 로딩 성공") + logger.success("KLUE BERT 모델 로딩 성공") except Exception as e: - print(f"KLUE BERT 로딩 실패, 다국어 BERT로 대체: {e}") + logger.warning(f"KLUE BERT 로딩 실패, 다국어 BERT로 대체: {e}") try: + logger.info("다국어 BERT 모델 로딩 시도 중...") self.tokenizer = AutoTokenizer.from_pretrained('bert-base-multilingual-cased') self.model = AutoModel.from_pretrained('bert-base-multilingual-cased') - print("다국어 BERT 모델 로딩 성공") + logger.success("다국어 BERT 모델 로딩 성공") except Exception as e2: - 
print(f"모든 BERT 모델 로딩 실패: {e2}") + logger.error(f"모든 BERT 모델 로딩 실패: {e2}") raise e2 def get_embedding(self, text: str) -> np.ndarray: """텍스트 임베딩 생성""" - inputs = self.tokenizer(text, return_tensors='pt', padding=True, truncation=True, max_length=128) - with torch.no_grad(): - outputs = self.model(**inputs) - return outputs.last_hidden_state[:, 0, :].numpy() + try: + logger.debug(f"임베딩 생성 시작: text='{text[:50]}'") + inputs = self.tokenizer(text, return_tensors='pt', padding=True, truncation=True, max_length=128) + with torch.no_grad(): + outputs = self.model(**inputs) + embedding = outputs.last_hidden_state[:, 0, :].numpy() + logger.debug(f"임베딩 생성 완료: shape={embedding.shape}") + return embedding + except Exception as e: + logger.error(f"임베딩 생성 오류: text='{text[:30]}', error='{e}'") + raise def calculate_similarity(self, text1: str, text2: str) -> float: """두 텍스트 간 유사도 계산""" - embedding1 = self.get_embedding(text1) - embedding2 = self.get_embedding(text2) - return cosine_similarity(embedding1, embedding2)[0][0] + try: + logger.debug(f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'") + embedding1 = self.get_embedding(text1) + embedding2 = self.get_embedding(text2) + similarity = cosine_similarity(embedding1, embedding2)[0][0] + logger.debug(f"유사도 계산 완료: similarity={similarity:.4f}") + return similarity + except Exception as e: + logger.error(f"유사도 계산 오류: text1='{text1[:30]}', text2='{text2[:30]}', error='{e}'") + raise def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> list[dict]: """배치로 유사도 분석""" - keyword_embedding = self.get_embedding(keyword) - results = [] + logger.info(f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}") - for i, title in enumerate(product_titles): - try: - title_embedding = self.get_embedding(title) - similarity = cosine_similarity(keyword_embedding, title_embedding)[0][0] + try: + keyword_embedding = self.get_embedding(keyword) + results = [] - results.append({ - 'index': i, - 'title': title, - 'similarity': float(similarity), - 'score': float(similarity) - }) - except Exception as e: - print(f"유사도 계산 오류 (제목: {title[:30]}): {e}") - results.append({ - 'index': i, - 'title': title, - 'similarity': 0.0, - 'score': 0.0 - }) + for i, title in enumerate(product_titles): + try: + logger.debug(f"유사도 계산 중 ({i + 1}/{len(product_titles)}): title='{title[:30]}'") + title_embedding = self.get_embedding(title) + similarity = cosine_similarity(keyword_embedding, title_embedding)[0][0] - # 유사도 기준 내림차순 정렬 - results.sort(key=lambda x: x['similarity'], reverse=True) - return results \ No newline at end of file + results.append({ + 'index': i, + 'title': title, + 'similarity': float(similarity), + 'score': float(similarity) + }) + logger.debug(f"유사도 계산 완료 ({i + 1}/{len(product_titles)}): similarity={similarity:.4f}") + except Exception as e: + logger.error(f"유사도 계산 오류 (제목: {title[:30]}): {e}") + results.append({ + 'index': i, + 'title': title, + 'similarity': 0.0, + 'score': 0.0 + }) + + # 유사도 기준 내림차순 정렬 + results.sort(key=lambda x: x['similarity'], reverse=True) + logger.info(f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}") + return results + except Exception as e: + logger.error(f"배치 유사도 분석 실패: keyword='{keyword}', error='{e}'") + raise \ No newline at end of file From 3478b4a7b78535bc52abc44dd3a9153fa8fc3714 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 04:10:02 +0900 Subject: [PATCH 04/31] =?UTF-8?q?refactor:=20service=EC=97=90=20=EC=9E=88?= 
=?UTF-8?q?=EB=8A=94=20print=EC=9D=84=20log=EB=A1=9C=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/service/crawl_service.py | 13 +++++++--- .../app/service/match_service.py | 23 ++++++++++++----- .../app/service/search_service.py | 23 ++++++++++------- .../app/service/similarity_service.py | 25 +++++++++++++------ 4 files changed, 58 insertions(+), 26 deletions(-) diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 57be3798..829c5a4b 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -2,6 +2,7 @@ from app.utils.crawler_utils import DetailCrawler from app.errors.CustomException import InvalidItemDataException from app.model.schemas import RequestSadaguCrawl +from loguru import logger class CrawlService: @@ -16,7 +17,7 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: crawler = DetailCrawler(use_selenium=True) try: - print(f"상품 상세 크롤링 시작: {request.product_url}") + logger.info(f"상품 상세 크롤링 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, product_url={request.product_url}") # 상세 정보 크롤링 실행 product_detail = await crawler.crawl_detail( @@ -25,9 +26,11 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: ) if not product_detail: + logger.error(f"상품 상세 정보 크롤링 실패: url={request.product_url}") raise InvalidItemDataException("상품 상세 정보 크롤링 실패") - print(f"크롤링 완료: {product_detail.get('title', 'Unknown')[:50]}") + product_title = product_detail.get('title', 'Unknown')[:50] + logger.success(f"크롤링 완료: title='{product_title}', price={product_detail.get('price', 0)}, options_count={len(product_detail.get('options', []))}") # 응답 데이터 구성 response_data = { @@ -41,10 +44,12 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: "crawled_at": time.strftime('%Y-%m-%d %H:%M:%S') } + logger.info(f"상품 상세 크롤링 서비스 완료: job_id={request.job_id}, status=success") return response_data except Exception as e: - print(f"크롤링 서비스 오류: {e}") + logger.error(f"크롤링 서비스 오류: job_id={request.job_id}, product_url={request.product_url}, error='{e}'") raise InvalidItemDataException(f"상품 상세 크롤링 오류: {e}") finally: - await crawler.close() \ No newline at end of file + await crawler.close() + logger.debug("크롤러 리소스 정리 완료") \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py index e52f821c..c37a5552 100644 --- a/apps/pre-processing-service/app/service/match_service.py +++ b/apps/pre-processing-service/app/service/match_service.py @@ -1,6 +1,7 @@ from app.utils.keyword_matcher import KeywordMatcher from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguMatch +from loguru import logger class MatchService: @@ -14,7 +15,10 @@ def match_products(self, request: RequestSadaguMatch) -> dict: keyword = request.keyword products = request.search_results + logger.info(f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}") + if not products: + logger.warning(f"매칭할 상품이 없음: keyword='{keyword}'") return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -28,17 +32,20 @@ def match_products(self, request: RequestSadaguMatch) -> dict: matcher = KeywordMatcher() 
matched_products = [] - print(f"키워드 '{keyword}'와 {len(products)}개 상품 매칭 분석 시작...") + logger.info(f"키워드 '{keyword}'와 {len(products)}개 상품 매칭 분석 시작...") for i, product in enumerate(products): title = product.get('title', '') if not title: + logger.debug(f"상품 {i + 1}: 제목이 없어서 스킵") continue + logger.debug(f"상품 {i + 1} 매칭 분석 시작: title='{title[:50]}'") + # 키워드 매칭 분석 match_result = matcher.analyze_keyword_match(title, keyword) - print(f"상품 {i + 1}: {title[:50]} | {match_result['reason']}") + logger.debug(f"상품 {i + 1} 매칭 결과: {match_result['reason']}") if match_result['is_match']: # 매칭된 상품에 매칭 정보 추가 @@ -49,13 +56,17 @@ def match_products(self, request: RequestSadaguMatch) -> dict: 'match_reason': match_result['reason'] } matched_products.append(matched_product) - print(f" ✅ 매칭됨!") - - print(f"매칭 결과: {len(matched_products)}개 상품") + logger.info(f"상품 {i + 1} 매칭 성공: title='{title[:30]}', type={match_result['match_type']}, score={match_result['score']:.3f}") # 매칭 스코어 기준으로 정렬 (높은 순) matched_products.sort(key=lambda x: x['match_info']['match_score'], reverse=True) + logger.success(f"키워드 매칭 완료: keyword='{keyword}', total_products={len(products)}, matched_products={len(matched_products)}") + + if matched_products: + best_match = matched_products[0] + logger.info(f"최고 매칭 상품: title='{best_match['title'][:30]}', score={best_match['match_info']['match_score']:.3f}") + return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -66,5 +77,5 @@ def match_products(self, request: RequestSadaguMatch) -> dict: } except Exception as e: - print(f"매칭 서비스 오류: {e}") + logger.error(f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'") raise InvalidItemDataException(f"키워드 매칭 실패: {str(e)}") \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index d676f7dd..073029f8 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -1,6 +1,7 @@ from app.utils.crawler_utils import SearchCrawler from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSearch +from loguru import logger class SearchService: @@ -15,7 +16,7 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: crawler = SearchCrawler(use_selenium=True) try: - print(f"키워드 '{keyword}'로 상품 검색 시작...") + logger.info(f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'") # Selenium 또는 httpx로 상품 검색 if crawler.use_selenium: @@ -24,7 +25,7 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: search_results = await crawler.search_products_httpx(keyword) if not search_results: - print("검색 결과가 없습니다.") + logger.warning(f"검색 결과가 없습니다: keyword='{keyword}'") return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -36,16 +37,17 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: # 상품별 기본 정보 수집 (제목이 없는 경우 다시 크롤링) enriched_results = [] - print(f"총 {len(search_results)}개 상품의 기본 정보를 수집 중...") + logger.info(f"총 {len(search_results)}개 상품의 기본 정보를 수집 중...") for i, product in enumerate(search_results): try: # 이미 제목이 있고 유효한 경우 그대로 사용 if product.get('title') and product['title'] != 'Unknown Title' and len(product['title'].strip()) > 0: enriched_results.append(product) + logger.debug(f"상품 {i + 1}: 기존 제목 사용 - '{product['title'][:30]}'") else: # 제목이 없거나 유효하지 않은 경우 다시 크롤링 - print(f"상품 {i + 1}: 제목 재수집 중... 
({product['url']})") + logger.debug(f"상품 {i + 1}: 제목 재수집 중... ({product['url']})") basic_info = await crawler.get_basic_product_info(product['url']) if basic_info and basic_info['title'] != "제목 없음": @@ -53,20 +55,22 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: 'url': product['url'], 'title': basic_info['title'] }) + logger.debug(f"상품 {i + 1}: 제목 재수집 성공 - '{basic_info['title'][:30]}'") else: # 그래도 제목을 못 찾으면 제외 - print(f" 제목 추출 실패, 제외") + logger.debug(f"상품 {i + 1}: 제목 추출 실패, 제외") continue # 최대 20개까지만 처리 if len(enriched_results) >= 20: + logger.info("최대 20개 상품 수집 완료") break except Exception as e: - print(f"상품 {i + 1} 처리 중 오류: {e}") + logger.error(f"상품 {i + 1} 처리 중 오류: url={product.get('url', 'N/A')}, error='{e}'") continue - print(f"최종 수집된 유효 상품: {len(enriched_results)}개") + logger.success(f"상품 검색 완료: keyword='{keyword}', 초기검색={len(search_results)}개, 최종유효상품={len(enriched_results)}개") return { "job_id": request.job_id, @@ -78,8 +82,9 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: } except Exception as e: - print(f"검색 서비스 오류: {e}") + logger.error(f"검색 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'") raise InvalidItemDataException(f"상품 검색 실패: {str(e)}") finally: - await crawler.close() \ No newline at end of file + await crawler.close() + logger.debug("검색 크롤러 리소스 정리 완료") \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 3a94800f..a74c3ca1 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -1,6 +1,7 @@ from app.utils.similarity_analyzer import SimilarityAnalyzer from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSimilarity +from loguru import logger class SimilarityService: @@ -15,9 +16,12 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict candidates = request.matched_products fallback_products = request.search_results or [] + logger.info(f"유사도 분석 서비스 시작: job_id={request.job_id}, keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}") + # 매칭된 상품이 없으면 전체 검색 결과로 폴백 if not candidates: if not fallback_products: + logger.warning(f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'") return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -28,7 +32,7 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict "status": "success" } - print("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석") + logger.info("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석") candidates = fallback_products analysis_mode = "fallback_similarity_only" else: @@ -37,12 +41,13 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict try: analyzer = SimilarityAnalyzer() - print(f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... (모드: {analysis_mode})") + logger.info(f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... 
(모드: {analysis_mode})") # 한 개만 있으면 바로 선택 if len(candidates) == 1: selected_product = candidates[0] + logger.info("단일 후보 상품 - 유사도 검증 진행") # 유사도 계산 similarity = analyzer.calculate_similarity(keyword, selected_product['title']) @@ -50,6 +55,7 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict if analysis_mode == "fallback_similarity_only": similarity_threshold = 0.3 if similarity < similarity_threshold: + logger.warning(f"단일 상품 유사도 미달: similarity={similarity:.4f} < threshold={similarity_threshold}") return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -66,6 +72,8 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict 'analysis_mode': analysis_mode } + logger.success(f"단일 상품 선택 완료: title='{selected_product['title'][:30]}', similarity={similarity:.4f}") + return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -77,15 +85,16 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict } # 여러 개가 있으면 유사도 비교 - print("여러 상품 중 최고 유사도로 선택...") + logger.info("여러 상품 중 최고 유사도로 선택...") # 제목만 추출해서 배치 분석 titles = [product['title'] for product in candidates] similarity_results = analyzer.analyze_similarity_batch(keyword, titles) # 결과 출력 - for result in similarity_results: - print(f" {result['title'][:40]} | 유사도: {result['similarity']:.4f}") + logger.info("유사도 분석 결과:") + for i, result in enumerate(similarity_results[:5]): # 상위 5개만 로그 + logger.info(f" {i+1}위: {result['title'][:40]} | 유사도: {result['similarity']:.4f}") # 최고 유사도 선택 best_result = similarity_results[0] @@ -94,6 +103,7 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict # 폴백 모드에서는 임계값 검증 similarity_threshold = 0.3 if analysis_mode == "fallback_similarity_only" and best_result['similarity'] < similarity_threshold: + logger.warning(f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}") return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -121,10 +131,11 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict final_score = match_score * 0.4 + similarity_score * 0.6 selected_product['final_score'] = final_score reason = f"종합점수({final_score:.4f}) = 매칭({match_score:.4f})*0.4 + 유사도({similarity_score:.4f})*0.6" + logger.info(f"종합 점수 계산: match_score={match_score:.4f}, similarity_score={similarity_score:.4f}, final_score={final_score:.4f}") else: reason = f"유사도({best_result['similarity']:.4f}) 기준 선택 ({analysis_mode})" - print(f"선택됨: {selected_product['title'][:50]} | {reason}") + logger.success(f"상품 선택 완료: title='{selected_product['title'][:30]}', {reason}") return { "job_id": request.job_id, @@ -137,5 +148,5 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict } except Exception as e: - print(f"유사도 분석 서비스 오류: {e}") + logger.error(f"유사도 분석 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'") raise InvalidItemDataException(f"유사도 분석 실패: {str(e)}") \ No newline at end of file From d8c671cc0aed58c959733590f3b0cf932b2cdf85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EA=B2=BD=EB=AF=BC?= <153978154+kakusiA@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:33:18 +0900 Subject: [PATCH 05/31] =?UTF-8?q?test:=20=EC=A0=84=EC=B2=B4=20=ED=94=84?= =?UTF-8?q?=EB=A1=9C=EC=84=B8=EC=8A=A4=20=ED=85=8C=EC=8A=A4=ED=8A=B8=20url?= =?UTF-8?q?=20=EA=B0=9C=EB=B0=9C=EB=B0=8F=20db=EC=97=B0=EA=B2=B0=20?= =?UTF-8?q?=ED=85=8C=EC=8A=A4=ED=8A=B8=20(#55)?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit * 전체 프로세스 실행 로직 개발(테스트용) * chore: db testcode 작성 * refetor: tester 리펙토링 product url 구조변경으로 인한 코드 리펛토링 --- .../app/api/endpoints/keywords.py | 14 +- .../app/api/endpoints/test.py | 94 ++++++- .../pre-processing-service/app/core/config.py | 14 +- .../app/db/db_connecter.py | 9 +- apps/pre-processing-service/poetry.lock | 265 +++++++++++++----- apps/pre-processing-service/pyproject.toml | 5 +- 6 files changed, 314 insertions(+), 87 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index 888ff0a0..f445858a 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -18,13 +18,13 @@ async def search(request: RequestNaverSearch): 이 엔드포인트는 아래와 같은 JSON 요청을 받습니다. RequestBase와 RequestNaverSearch의 모든 필드를 포함해야 합니다. { - "job_id": "job-123", - "schedule_id": "schedule-456", - "schedule_his_id": 789, - "tag": "fastapi", - "category": "tech", - "start_date": "2025-09-01T12:00:00", - "end_date": "2025-09-02T15:00:00" + "job_id":1, + "schedule_id": 1, + "sschdule_his_id":1, + "tag":"naver", + "category":"50000000", + "start_date":"2025-09-01", + "end_date":"2025-09-02" } """ response_data= await keyword_search(request) diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py index 2a33591e..9225c7cd 100644 --- a/apps/pre-processing-service/app/api/endpoints/test.py +++ b/apps/pre-processing-service/app/api/endpoints/test.py @@ -1,9 +1,19 @@ # app/api/endpoints/embedding.py from fastapi import APIRouter +from sqlalchemy import text + from app.decorators.logging import log_api_call from ...errors.CustomException import * from fastapi import APIRouter - +from typing import Mapping, Any, Dict +from ...model.schemas import * +from ...service.blog.naver_blog_post_service import NaverBlogPostService +from ...service.blog.tistory_blog_post_service import TistoryBlogPostService +from ...service.keyword_service import keyword_search +from ...service.match_service import MatchService +from ...service.search_service import SearchService +from ...service.similarity_service import SimilarityService +from ...db.db_connecter import engine # ✅ 우리가 만든 DB 유틸 임포트 # 이 파일만의 독립적인 라우터를 생성합니다. 
router = APIRouter() @@ -32,4 +42,84 @@ async def trigger_error(item_id: int): raise ValueError("이것은 테스트용 값 오류입니다.") - return {"result": item_id} \ No newline at end of file + return {"result": item_id}\ + +@router.get("/db-test", tags=["db"]) +async def db_test(): + """간단한 DB 연결 및 쿼리 테스트""" + try: + with engine.connect() as conn: + result = conn.execute(text("SELECT NOW() as now")) + row = result.fetchone() + return {"status": "ok", "db_time": str(row.now)} + except Exception as e: + return {"status": "error", "detail": str(e)} + +def with_meta(data: Mapping[str, Any], meta: Mapping[str, Any]) -> Dict[str, Any]: + """요청 payload + 공통 meta 머지""" + return {**meta, **data} + +@router.get("/tester",response_model=None) +async def processing_tester(): + meta = { + "job_id": 1, + "schedule_id": 1, + "schedule_his_id": 1, # ✅ 타이포 수정 + } + request_dict = { + "tag":"naver", + "category":"50000000", + "start_date":"2025-09-01", + "end_date":"2025-09-02" + } + #네이버 키워드 검색 + naver_request = RequestNaverSearch(**with_meta(meta,request_dict)) + response_data = await keyword_search(naver_request) + keyword = response_data.get("keyword") + print(keyword) + + keyword ={ + "keyword" : keyword, + } + + #싸다구 상품 검색 + sadagu_request = RequestSadaguSearch(**with_meta(meta, keyword)) + search_service = SearchService() + keyword_result = await search_service.search_products(sadagu_request) + print(keyword_result) + + #싸다구 상품 매치 + keyword["search_results"] = keyword_result.get("search_results") + keyword_match_request = RequestSadaguMatch(**with_meta(meta, keyword)) + match_service = MatchService() + keyword_match_response = match_service.match_products(keyword_match_request) + print(keyword_match_response) + + #싸다구 상품 유사도 분석 + keyword["matched_products"] = keyword_match_response.get("matched_products") + keyword_similarity_request = RequestSadaguSimilarity(**with_meta(meta, keyword)) + similarity_service = SimilarityService() + keyword_similarity_response = similarity_service.select_product_by_similarity( + keyword_similarity_request + ) + print(keyword_similarity_response) + + #싸다구 상품 크롤링 + + + + #블로그 생성 + + + + #블로그 배포 + tistory_service = TistoryBlogPostService() + result = tistory_service.post_content( + title = "안녕하살법", + content = "안녕하살법 받아치기", + tags= ["퉁퉁퉁사후르","짜라짜라"] + ) + print(result) + + + return "구웃" \ No newline at end of file diff --git a/apps/pre-processing-service/app/core/config.py b/apps/pre-processing-service/app/core/config.py index 52930483..aab10515 100644 --- a/apps/pre-processing-service/app/core/config.py +++ b/apps/pre-processing-service/app/core/config.py @@ -69,7 +69,7 @@ class BaseSettingsConfig(BaseSettings): db_user: str db_pass: str db_name: str - env_name: str = ".dev" + env_name: str # MeCab 사전 경로 (자동 감지) mecab_path: Optional[str] = None @@ -77,6 +77,8 @@ class BaseSettingsConfig(BaseSettings): # 외부 서비스 계정 정보 naver_id: Optional[str] = None naver_password: Optional[str] = None + tistory_blog_name: Optional[str] = None + tistory_blog_url: Optional[str] = None tistory_id: Optional[str] = None tistory_password: Optional[str] = None @@ -92,18 +94,22 @@ def __init__(self, **kwargs): @property def db_url(self) -> str: """개별 필드를 사용하여 DB URL을 동적으로 생성""" - return f"postgresql://{self.db_user}:{self.db_pass}@{self.db_host}:{self.db_port}/{self.db_name}" + return( + f"mysql+pymysql://{self.db_user}:" + f"{self.db_pass}" + f"@{self.db_host}:{self.db_port}/{self.db_name}" + ) model_config = SettingsConfigDict(env_file=['.env']) # 환경별 설정 클래스 class DevSettings(BaseSettingsConfig): - model_config 
= SettingsConfigDict(env_file=['.env', '.dev.env']) + model_config = SettingsConfigDict(env_file=['.env', '.env.dev']) class PrdSettings(BaseSettingsConfig): - model_config = SettingsConfigDict(env_file=['.env', '.prd.env']) + model_config = SettingsConfigDict(env_file=['.env', '.env.prod']) def get_settings() -> BaseSettingsConfig: """환경 변수에 따라 적절한 설정 객체를 반환하는 함수""" diff --git a/apps/pre-processing-service/app/db/db_connecter.py b/apps/pre-processing-service/app/db/db_connecter.py index 0ed48b04..2612cd65 100644 --- a/apps/pre-processing-service/app/db/db_connecter.py +++ b/apps/pre-processing-service/app/db/db_connecter.py @@ -1 +1,8 @@ -from ..core.config import settings \ No newline at end of file +from ..core.config import settings +from sqlalchemy import create_engine, text +from app.core.config import settings + +engine = create_engine( + settings.db_url, + pool_pre_ping=True, # 연결 유효성 체크 +) \ No newline at end of file diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 30f79248..70da2b53 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -452,6 +452,75 @@ test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""] tqdm = ["tqdm"] +[[package]] +name = "greenlet" +version = "3.2.4" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +files = [ + {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, + {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, + {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, + {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, + {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, + {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = 
"sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, + {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, + {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil", "setuptools"] + [[package]] name = "gunicorn" version = "23.0.0" @@ -1432,6 +1501,33 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pymysql" +version = "1.1.2" +description = "Pure Python MySQL Driver" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9"}, + {file = "pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03"}, +] + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + +[[package]] +name = "pyperclip" +version = "1.9.0" +description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"}, +] + [[package]] name = "pysocks" version = "1.7.1" @@ -1482,77 +1578,6 @@ files = [ [package.extras] cli = ["click (>=5.0)"] -[[package]] -name = "python-mecab-ko" -version = "1.3.7" -description = "A python binding for mecab-ko" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "python_mecab_ko-1.3.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4760efe6327b5707f55db2b4a6f8fb047fe8e068577a9a913304bb0d12e7de44"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:27a03ae50aabc7f057c26ad5e4c6c4d431cf696778e45025e208d2f6b7bf115d"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8d2539e7ea91eb0705381f75e64c626be4eba69824a8c82fbdf2c4e48a1d389"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2bad59670b280548b9060c1b511f6f088c09b977355de7192e9d0044b8f724b"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c4347f075b8748cbc5695f6b91120b0e388344eab5d9c26d50ad3c57c35754"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-win32.whl", hash = "sha256:682875cd1cafeeb2946b856b1b479144b4e8d28363b6bff3ae1c8b294994742b"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-win_amd64.whl", hash = "sha256:ef5a6bb8d4611dd621436492adb140c280fe4e155097c5dcc8b1fcdd203abfb6"}, - {file = "python_mecab_ko-1.3.7-cp310-cp310-win_arm64.whl", hash = "sha256:14b070b886d864964710c6a396556d8509be2dce1618f401192fd7c213eb4608"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da1cc9de07e75beb2d4067c1c072ecabdb293440633fc0e32f2875a14e703829"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:523153de14262c413838852742541d48ad99d41ab8f6c5413a226319ee4c25ef"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:010ca2297e63d08a772466dd401d36ed9914502b8794c08948427a4083b3202c"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d7b35116e98fb736f7c9550eb1a74cfb6aa35c39b0b43cbe7a8837bfa3cd39d4"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0933d3fcb84f6ed36cce49f1939604ac0fcaf4460441e832cb98ca1bdce74a37"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-win32.whl", hash = "sha256:644207821de8c76ff2442d84c8902dd16b239fdc80c79d0774f8b9ea446c4218"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-win_amd64.whl", hash = "sha256:a456e40817dc73f58d7f11ff01af4394cdd1ceab2e98feddde625587603d65f7"}, - {file = "python_mecab_ko-1.3.7-cp311-cp311-win_arm64.whl", hash = "sha256:9f5e40101426b87c99ecb1268f56402f9c44f9d06271b28ccc1ec1bc6bc582ac"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7721f69381dac572a1598e5906cc5faba233ed48bc6ff8672082a519d7db0ba1"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:eae5eb6178b06019e3773e9dde126dd29df5ed417406be5611ebdd0f8839c1e1"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e90e8c1009f8f6aa0dfc43c916ff481dc79aa5a7e528a41a193add9c61ac6d1"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a205ca4da908df39d6d70f968426d0e9dc79274a6d34b13a5588ab52f0e12be8"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0e98a7d94278f4f5d93f03e35cc8044460c0076ab4698b764d5c44bd897dbe"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-win32.whl", hash = "sha256:3145c53772e842a046fdbf0659f0e5235e16d51b0bb8c0d3e8e078dc57d22373"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-win_amd64.whl", hash = "sha256:3387906e66109989603b877899d1ae3a0132795c9c73ad91a5e7c4c077177351"}, - {file = "python_mecab_ko-1.3.7-cp312-cp312-win_arm64.whl", hash = "sha256:13126509630e47fc89a8c575f5af3eed1bc09370e978b331caf32325e6b98383"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198c0b9a832966927ceceda599b8d2f38426d11d25defa0d4ed819e3d00bfa91"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:661da586a6783cd60dc93ebb4dcc182e5cb3d37b98d25fe741c8eb2aabd59b30"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eab31739769b1ad90fcd81f7e2319f2bc33f7b85aee3a5cec230352963678ac0"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10ea3c549eac11cdf9e994ce65fb34653a142d04eaa519c2ba3a99646cb21991"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-win32.whl", hash = "sha256:ec22b9f8b7d5ec62d2af48d252f0172e1c4dfdf1387bad356f62b73084bac675"}, - {file = "python_mecab_ko-1.3.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e0fb84a0eda5f77dbb456fb7eba9715349668b2a9bb4235df0904620653eabda"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ad754804a5a5b64b62d77a962d33ef6e931765cede89f880e02e3d18971a5bd"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64346e4a627ad3b56647f2d6909ba52bd25b5b29f8d320944ed9dce602ba0b75"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0b50438fb570299bd7e4c30549373c171b94f6400c32b0b455b37047e5ed7ed"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4321180be1e5446bb97e8f803079deb72500af7bbb7d0e2c49ec9995ec3674f5"}, - 
{file = "python_mecab_ko-1.3.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c297e6e5a8a0aacd75e9efee465d0bf7f6d1b9f0ccb9b18916e9203ea0e349"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-win32.whl", hash = "sha256:8015778e03186f8d2e7b0f1c0c9b753617d848cea2c4eba09e59e081080da92a"}, - {file = "python_mecab_ko-1.3.7-cp38-cp38-win_amd64.whl", hash = "sha256:782bf38e817ad54ca16dccd2e4edf083829e259aac1da3187ccc1fd305dfb503"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4fdae16e907470cec155721cc0f849a9d52e01eae316aae53101fa236069505b"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:691bed2317e4cbbf4f00fc11a59d6d95412b72b9bd6eea037880df95fcd7e6a0"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d147ce60440cd04e3e113508f1c7f04ed39bcbb7991921d9c66b060709af253e"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99f02fb9816dda3258726b33423f0b48429582d4386529c08caa01c0d4e8365b"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96c719105eae24c24882fbea821df7a26c961590d06ff932599690785d7efe5"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-win32.whl", hash = "sha256:288ff89e4d1318923acecccfbb0b9d4937a8f93ac27e4868e08c778629d0522a"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-win_amd64.whl", hash = "sha256:12c4b86041350024355d51dd16cb989fd027e142c8083d3b12d21b9262522054"}, - {file = "python_mecab_ko-1.3.7-cp39-cp39-win_arm64.whl", hash = "sha256:2a84df563961a6507e170f78b010716a69874fc4b00ce503280f5eb7d62ccd1c"}, - {file = "python_mecab_ko-1.3.7.tar.gz", hash = "sha256:69cbb2ac559a3169c22b1a3aa5d3c247d2f7902d9fe7dc9966189a9c7694af0b"}, -] - -[package.dependencies] -python-mecab-ko-dic = "*" - -[[package]] -name = "python-mecab-ko-dic" -version = "2.1.1.post2" -description = "mecab-ko-dic packaged for Python" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "python-mecab-ko-dic-2.1.1.post2.tar.gz", hash = "sha256:2c423713bdc475345ec98cd084b30759458f8f06c38a9ef94ab8687942c2cd34"}, - {file = "python_mecab_ko_dic-2.1.1.post2-py3-none-any.whl", hash = "sha256:ef8f4e80c8976f1340a7264abb0c96f384fe059fd897584aeba0151753c6ae9b"}, -] - [[package]] name = "pyyaml" version = "6.0.2" @@ -1976,6 +2001,102 @@ files = [ {file = "soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f"}, ] +[[package]] +name = "sqlalchemy" +version = "2.0.43" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07097c0a1886c150ef2adba2ff7437e84d40c0f7dcb44a2c2b9c905ccfc6361c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cdeff998cb294896a34e5b2f00e383e7c5c4ef3b4bfa375d9104723f15186443"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:bcf0724a62a5670e5718957e05c56ec2d6850267ea859f8ad2481838f889b42c"}, + {file = 
"SQLAlchemy-2.0.43-cp37-cp37m-win32.whl", hash = "sha256:c697575d0e2b0a5f0433f679bda22f63873821d991e95a90e9e52aae517b2e32"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win_amd64.whl", hash = "sha256:d34c0f6dbefd2e816e8f341d0df7d4763d382e3f452423e752ffd1e213da2512"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687"}, + {file = 
"sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e6aeb2e0932f32950cf56a8b4813cb15ff792fc0c9b3752eaf067cfe298496a"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f964a05356f4bca4112e6334ed7c208174511bd56e6b8fc86dad4d024d4185"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46293c39252f93ea0910aababa8752ad628bcce3a10d3f260648dd472256983f"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:136063a68644eca9339d02e6693932116f6a8591ac013b0014479a1de664e40a"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6e2bf13d9256398d037fef09fd8bf9b0bf77876e22647d10761d35593b9ac547"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:44337823462291f17f994d64282a71c51d738fc9ef561bf265f1d0fd9116a782"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win32.whl", hash = "sha256:13194276e69bb2af56198fef7909d48fd34820de01d9c92711a5fa45497cc7ed"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win_amd64.whl", hash = "sha256:334f41fa28de9f9be4b78445e68530da3c5fa054c907176460c81494f4ae1f5e"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", 
hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b"}, + {file = "sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc"}, + {file = "sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417"}, +] + +[package.dependencies] +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + [[package]] name = "starlette" version = "0.47.2" @@ -2398,4 +2519,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "30722a9f9497e4264b15e7af55b9f8eeb44781a8800f571e477fc146a340179e" +content-hash = "72004c91cb88e0c411cff8447951d5c36a0a44c0b8cf3937a24860c10700251f" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index af7d2124..f3b4d06c 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "pre-processing-service" +name = "app" version = "0.1.0" description = "" authors = [ @@ -29,6 +29,9 @@ dependencies = [ "httpx (>=0.28.1,<0.29.0)", "asyncpg (>=0.30.0,<0.31.0)", "gunicorn (>=23.0.0,<24.0.0)", + "pyperclip (>=1.9.0,<2.0.0)", + "pymysql (>=1.1.2,<2.0.0)", + "sqlalchemy (>=2.0.43,<3.0.0)", ] From 5ab7698447492bc9de915e6e0776cd826f0c56b8 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 16:47:31 +0900 Subject: [PATCH 06/31] =?UTF-8?q?refactor:=20=EC=97=94=EB=93=9C=ED=8F=AC?= =?UTF-8?q?=EC=9D=B8=ED=8A=B8=20title,=20description=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/model/schemas.py | 52 ++++++++++--------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/apps/pre-processing-service/app/model/schemas.py 
b/apps/pre-processing-service/app/model/schemas.py index 9a4893cd..ffc5251b 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -5,28 +5,28 @@ # 기본 요청 class RequestBase(BaseModel): - job_id: int - schedule_id: int - schedule_his_id: Optional[int] = None + job_id: int = Field(..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자") + schedule_id: int = Field(..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자") + schedule_his_id: Optional[int] = Field(None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자") # 기본 응답 class ResponseBase(BaseModel): - job_id: int - schedule_id: int - schedule_his_id: Optional[int] = None - status: str + job_id: int = Field(..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자") + schedule_id: int = Field(..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자") + schedule_his_id: Optional[int] = Field(None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자") + status: str = Field(..., title="상태", description="요청 처리 상태") # 네이버 키워드 추출 class RequestNaverSearch(RequestBase): - tag: str - category: Optional[str] = None - start_date: Optional[str] = None - end_date: Optional[str] = None + tag: str = Field(..., title="태그", description="데이터랩/스토어 태그 구분") + category: Optional[str] = Field(None, title="카테고리", description="검색할 카테고리") + start_date: Optional[str] = Field(None, title="시작일", description="검색 시작 날짜 (YYYY-MM-DD)") + end_date: Optional[str] = Field(None, title="종료일", description="검색 종료 날짜 (YYYY-MM-DD)") class ResponseNaverSearch(ResponseBase): - category: Optional[str] = None - keyword: str - total_keyword: Dict[int, str] + category: Optional[str] = Field(None, title="카테고리", description="검색 카테고리") + keyword: str = Field(..., title="키워드", description="검색에 사용된 키워드") + total_keyword: Dict[int, str] = Field(..., title="총 키워드", description="키워드별 총 검색 결과") # 2단계: 검색 class RequestSadaguSearch(RequestBase): @@ -74,23 +74,25 @@ class ResponseSadaguCrawl(BaseModel): status: str = Field(..., title="처리 상태", description="크롤링 처리 결과 상태") crawled_at: Optional[str] = Field(None, title="크롤링 시간", description="크롤링 완료 시간") -# 블로그 생성 +# 블로그 콘텐츠 생성 class RequestBlogCreate(RequestBase): - tag: str - category: str + tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류 태그") + category: str = Field(..., title="카테고리", description="검색(상품) 카테고리") class ResponseBlogCreate(ResponseBase): pass # 블로그 배포 class RequestBlogPublish(RequestBase): - tag: str - category: str - - # 임의로 추가 - title: str - content: str - tags: List[str] + blog_id: str = Field(..., description= "블로그 아이디") + blog_pw: str = Field(..., description= "블로그 비밀번호") + post_title: str = Field(..., description= "포스팅 제목") + post_content: str = Field(..., description= "포스팅 내용") + post_tags: List[str] = Field(default=[], description= "포스팅 태그 목록") class ResponseBlogPublish(ResponseBase): - metadata: Optional[Dict[str, Any]] \ No newline at end of file + # 디버깅 용 + metadata: Optional[Dict[str, Any]] = Field(None, description= "포스팅 관련 메타데이터") + + # 프로덕션 용 + # post_url: str = Field(..., description="포스팅 URL") \ No newline at end of file From 1b96941ebf011b7d0c5de0f3894bde67a0b2dd7f Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 16:55:10 +0900 Subject: [PATCH 07/31] =?UTF-8?q?refactor:=20=EC=97=94=EB=93=9C=ED=8F=AC?= =?UTF-8?q?=EC=9D=B8=ED=8A=B8=20title,=20description=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/errors/BlogPostingException.py | 13 +++++ 
.../service/blog/base_blog_post_service.py | 58 +++++++++++++------ 2 files changed, 52 insertions(+), 19 deletions(-) diff --git a/apps/pre-processing-service/app/errors/BlogPostingException.py b/apps/pre-processing-service/app/errors/BlogPostingException.py index d0b360a8..d8a70c2f 100644 --- a/apps/pre-processing-service/app/errors/BlogPostingException.py +++ b/apps/pre-processing-service/app/errors/BlogPostingException.py @@ -76,4 +76,17 @@ def __init__(self, config_item: str): status_code=500, detail=f"블로그 서비스 설정 오류: {config_item}", code="BLOG_CONFIGURATION_ERROR" + ) + +class BloggerApiException(CustomException): + """ + Blogger API 관련 오류 예외 + @:param reason: 실패 이유 + @:param detail: 상세 오류 메시지 + """ + def __init__(self, reason: str, detail: str): + super().__init__( + status_code=500, + detail=f"Blogger API 오류: {reason} ({detail})", + code="BLOGGER_API_ERROR" ) \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py index 55aa34e9..b28c1081 100644 --- a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py @@ -1,34 +1,61 @@ from abc import ABC, abstractmethod -from typing import Dict +from typing import Dict, List, Optional from app.utils.crawling_util import CrawlingUtil from app.errors.BlogPostingException import * from app.errors.CrawlingException import * + class BaseBlogPostService(ABC): """ 블로그 포스팅 서비스 추상 클래스 """ - def __init__(self): + def __init__(self, config_file="blog_config.json"): """공통 초기화 로직""" - try: - self.crawling_service = CrawlingUtil() - self.web_driver = self.crawling_service.get_driver() - self.wait_driver = self.crawling_service.get_wait() - except Exception: - raise WebDriverConnectionException() + # Selenium 기반 서비스를 위한 초기화 + if self._requires_webdriver(): + try: + self.crawling_service = CrawlingUtil() + self.web_driver = self.crawling_service.get_driver() + self.wait_driver = self.crawling_service.get_wait() + except Exception: + raise WebDriverConnectionException() + else: + # API 기반 서비스의 경우 WebDriver가 필요 없음 + self.crawling_service = None + self.web_driver = None + self.wait_driver = None + + # API 기반 서비스를 위한 초기화 + self.config_file = config_file + self.config = {} + self.current_upload_account = None + + # API 관련 속성들 (사용하지 않는 서비스에서는 None으로 유지) + self.blogger_service = None + self.blog_id = None + self.scopes = None self._load_config() + def _requires_webdriver(self) -> bool: + """ + 서브클래스에서 WebDriver가 필요한지 여부를 반환 + 기본값은 True (Selenium 기반), API 기반 서비스에서는 False로 오버라이드 + """ + return True + @abstractmethod def _load_config(self) -> None: """플랫폼별 설정 로드""" pass - @abstractmethod def _login(self) -> None: - """플랫폼별 로그인 구현""" + """ + 플랫폼별 로그인 구현 (API 기반 서비스의 경우 인증으로 대체) + 기본 구현은 아무것도 하지 않음 (API 서비스용) + """ pass @abstractmethod @@ -54,14 +81,7 @@ def _validate_content(self, title: str, content: str, tags: Optional[List[str]] :param content: 포스트 내용 :param tags: 포스트 태그 리스트 """ - # if not title or not title.strip(): - # raise BlogContentValidationException("title", "제목이 비어있습니다") - # - # if not content or not content.strip(): - # raise BlogContentValidationException("content", "내용이 비어있습니다") - # - # if tags is None: - # raise BlogContentValidationException("tags", "태그가 비어있습니다") + pass def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict: """ @@ -74,7 +94,7 @@ def post_content(self, title: str, content: str, tags: List[str] 
= None) -> Dict # 1. 콘텐츠 유효성 검사 self._validate_content(title, content, tags) - # 2. 로그인 + # 2. 로그인 (Selenium 기반) 또는 인증 (API 기반) self._login() # 3. 포스트 작성 및 발행 From aeff8fee67ad465dbc5810b5fca0ca57c29f2dc4 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 16:57:56 +0900 Subject: [PATCH 08/31] =?UTF-8?q?refactor:=20=EC=97=94=EB=93=9C=ED=8F=AC?= =?UTF-8?q?=EC=9D=B8=ED=8A=B8=20title,=20description=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/pre-processing-service/app/model/schemas.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index ffc5251b..36d0514f 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -57,21 +57,14 @@ class ResponseSadaguSimilarity(ResponseBase): reason: Optional[str] = Field(None, title="선택 이유", description="상품 선택 근거 및 점수 정보") # 사다구몰 크롤링 -class RequestSadaguCrawl(BaseModel): - job_id: int = Field(..., title="작업 ID", description="현재 실행중인 작업의 고유 식별자") - schedule_id: int = Field(..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자") - schedule_his_id: int = Field(..., title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자") +class RequestSadaguCrawl(RequestBase): tag: str = Field(..., title="크롤링 태그", description="크롤링 유형을 구분하는 태그 (예: 'detail')") product_url: HttpUrl = Field(..., title="상품 URL", description="크롤링할 상품 페이지의 URL") -class ResponseSadaguCrawl(BaseModel): - job_id: int = Field(..., title="작업 ID", description="작업 식별자") - schedule_id: int = Field(..., title="스케줄 ID", description="스케줄 식별자") - schedule_his_id: int = Field(..., title="스케줄 히스토리 ID", description="스케줄 이력 식별자") +class ResponseSadaguCrawl(ResponseBase): tag: str = Field(..., title="크롤링 태그", description="크롤링 유형 태그") product_url: str = Field(..., title="상품 URL", description="크롤링된 상품 URL") product_detail: Optional[Dict] = Field(None, title="상품 상세정보", description="크롤링된 상품의 상세 정보") - status: str = Field(..., title="처리 상태", description="크롤링 처리 결과 상태") crawled_at: Optional[str] = Field(None, title="크롤링 시간", description="크롤링 완료 시간") # 블로그 콘텐츠 생성 From e54301adf8a4b69cf0bb6bfe10993c1da991d6c4 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 17:03:53 +0900 Subject: [PATCH 09/31] =?UTF-8?q?refactor:=20=EC=97=94=EB=93=9C=ED=8F=AC?= =?UTF-8?q?=EC=9D=B8=ED=8A=B8=20summary=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/api/endpoints/blog.py | 18 ++++----- .../app/api/endpoints/keywords.py | 39 +++++++++++-------- .../app/api/endpoints/product.py | 23 ++++++----- 3 files changed, 43 insertions(+), 37 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 6a771cae..341c0aac 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -5,30 +5,26 @@ from app.service.blog.tistory_blog_post_service import TistoryBlogPostService from app.service.blog.naver_blog_post_service import NaverBlogPostService -# 이 파일만의 독립적인 라우터를 생성합니다. 
router = APIRouter() -@router.get("/") +@router.get("/", summary="블로그 API 상태 확인") async def root(): return {"message": "blog API"} -@router.post("/rag/create", response_model=ResponseBlogCreate) +@router.post("/rag/create", response_model=ResponseBlogCreate, summary="RAG 기반 블로그 콘텐츠 생성") async def rag_create(request: RequestBlogCreate): """ RAG 기반 블로그 콘텐츠 생성 """ return {"message": "blog API"} -@router.post("/publish", response_model=ResponseBlogPublish) +@router.post("/publish", response_model=ResponseBlogPublish, summary="블로그 콘텐츠 배포 (네이버/티스토리/블로거 지원)") async def publish(request: RequestBlogPublish): """ - 생성된 블로그 콘텐츠 배포 - 네이버 블로그와 티스토리 블로그를 지원 - 현재는 생성된 콘텐츠가 아닌, 임의의 제목,내용,태그를 배포 - :param request: RequestBlogPublish - :return: ResponseBlogPublish + 생성된 블로그 콘텐츠를 배포합니다. + 네이버 블로그와 티스토리 블로그를 지원하며, + 현재는 생성된 콘텐츠가 아닌 임의의 제목, 내용, 태그를 배포합니다. """ - if request.tag == "naver": naver_service = NaverBlogPostService() result = naver_service.post_content( @@ -64,4 +60,4 @@ async def publish(request: RequestBlogPublish): schedule_his_id= 1, status="200", metadata=result - ) \ No newline at end of file + ) diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index f445858a..22c23fa6 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -1,35 +1,42 @@ -# app/api/endpoints/keywords.py from ...service.keyword_service import keyword_search - from fastapi import APIRouter -from ...errors.CustomException import * +from ...errors.CustomException import * from ...model.schemas import RequestNaverSearch, ResponseNaverSearch -# 이 파일만의 독립적인 라우터를 생성합니다. router = APIRouter() -@router.get("/") + +@router.get("/", summary="키워드 API 상태 확인") async def root(): + """ + 키워드 API가 정상 동작하는지 확인 + """ return {"message": "keyword API"} -@router.post("/search",response_model=ResponseNaverSearch) + +@router.post("/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색") async def search(request: RequestNaverSearch): """ - 이 엔드포인트는 아래와 같은 JSON 요청을 받습니다. - RequestBase와 RequestNaverSearch의 모든 필드를 포함해야 합니다. + 이 엔드포인트는 JSON 요청으로 네이버 키워드 검색을 수행합니다. 
+ + 요청 예시: { - "job_id":1, + "job_id": 1, "schedule_id": 1, - "sschdule_his_id":1, - "tag":"naver", - "category":"50000000", - "start_date":"2025-09-01", - "end_date":"2025-09-02" + "schedule_his_id": 1, + "tag": "naver", + "category": "50000000", + "start_date": "2025-09-01", + "end_date": "2025-09-02" } """ - response_data= await keyword_search(request) + response_data = await keyword_search(request) return response_data -@router.post("/ssadagu/validate",response_model=ResponseNaverSearch) + +@router.post("/ssadagu/validate", response_model=ResponseNaverSearch, summary="사다구몰 키워드 검증") async def ssadagu_validate(request: RequestNaverSearch): + """ + 사다구몰 키워드 검증 테스트용 엔드포인트 + """ return ResponseNaverSearch() diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index f8f69dff..023096f8 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -10,15 +10,18 @@ router = APIRouter() -@router.get("/") +@router.get("/", summary="상품 API 상태 확인") async def root(): + """ + 상품 API 서버 상태 확인용 엔드포인트 + """ return {"message": "product API"} -@router.post("/search", response_model=ResponseSadaguSearch) +@router.post("/search", response_model=ResponseSadaguSearch, summary="상품 검색") async def search(request: RequestSadaguSearch): """ - 상품 검색 엔드포인트 + 요청된 키워드로 사다구몰 상품을 검색합니다. """ try: search_service = SearchService() @@ -34,10 +37,10 @@ async def search(request: RequestSadaguSearch): raise HTTPException(status_code=500, detail=str(e)) -@router.post("/match", response_model=ResponseSadaguMatch) +@router.post("/match", response_model=ResponseSadaguMatch, summary="상품 매칭") async def match(request: RequestSadaguMatch): """ - 상품 매칭 엔드포인트 + 검색 결과 상품과 키워드를 기반으로 매칭을 수행합니다. """ try: match_service = MatchService() @@ -53,10 +56,10 @@ async def match(request: RequestSadaguMatch): raise HTTPException(status_code=500, detail=str(e)) -@router.post("/similarity", response_model=ResponseSadaguSimilarity) +@router.post("/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석") async def similarity(request: RequestSadaguSimilarity): """ - 유사도 분석 엔드포인트 + 매칭된 상품들 중 키워드와의 유사도를 계산하여 최적의 상품을 선택합니다. """ try: similarity_service = SimilarityService() @@ -72,10 +75,10 @@ async def similarity(request: RequestSadaguSimilarity): raise HTTPException(status_code=500, detail=str(e)) -@router.post("/crawl", response_model=ResponseSadaguCrawl) +@router.post("/crawl", response_model=ResponseSadaguCrawl, summary="상품 상세 정보 크롤링") async def crawl(request: Request, body: RequestSadaguCrawl): """ - 상품 상세 정보 크롤링 엔드포인트 + 상품 상세 페이지를 크롤링하여 상세 정보를 수집합니다. 
""" try: crawl_service = CrawlService() @@ -90,4 +93,4 @@ async def crawl(request: Request, body: RequestSadaguCrawl): except ItemNotFoundException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file + raise HTTPException(status_code=500, detail=str(e)) From 2b40d56e5c4d7a7f30bb0542747e3c30f70909a6 Mon Sep 17 00:00:00 2001 From: Jihu Kim Date: Tue, 9 Sep 2025 18:03:52 +0900 Subject: [PATCH 10/31] =?UTF-8?q?=EC=8A=A4=ED=82=A4=EB=A7=88=20=EC=88=98?= =?UTF-8?q?=EC=A0=95=20=EB=B0=8F=20Table=20=EB=84=A4=EC=9D=B4=EB=B0=8D,=20?= =?UTF-8?q?DTO=20=EB=84=A4=EC=9D=B4=EB=B0=8D=20=EC=BB=A8=EB=B2=A4=EC=85=98?= =?UTF-8?q?=EC=97=90=20=EB=94=B0=EB=9D=BC=20=EB=A6=AC=ED=8C=A9=ED=86=A0?= =?UTF-8?q?=EB=A7=81=20(#53)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: DTO 네이밍을 단수형으로 변경 * refactor: 테이블 명을 단수형으로 통일, 외래키 제약 조건 제거 * chore: Workflows run 관련 테이블 추가 * chore: Table 스키마 변경 ERD cloud와 일치 * refactor: User 테이블 명을 단수형으로 통일 * fix: Schemas to schema (typo) * chore: if not exits create sql에서 제거 * chore: Drop all table before create shcema * fix: Schema SQL 스크립트를 수정 * fix: Drop SQL 스크립트 분리 및 Schema 수정 --------- Co-authored-by: can019 --- ...entsCardDto.java => DepartmentCardDo.java} | 2 +- .../controller/OrganizationController.java | 4 +- ...onsDto.java => OrganizationOptionDto.java} | 10 +- .../service/OrganizationService.java | 14 +- .../{RolesCardDto.java => RoleCardDto.java} | 2 +- .../icebang/mapper/OrganizationMapper.java | 8 +- .../main/resources/application-develop.yml | 4 +- .../main/resources/application-test-e2e.yml | 4 +- .../main/resources/application-test-unit.yml | 4 +- .../resources/mybatis/mapper/AuthMapper.xml | 16 +- .../mybatis/mapper/OrganizationMapper.xml | 4 +- .../mybatis/mapper/ScheduleMapper.xml | 2 +- .../src/main/resources/sql/00-drop-h2.sql | 6 + .../src/main/resources/sql/00-drop-maria.sql | 18 ++ .../src/main/resources/sql/00-truncate.sql | 25 +- .../sql/01-insert-internal-users.sql | 240 ++++++--------- .../src/main/resources/sql/01-schema.sql | 289 ++++++++++++++++++ .../sql/02-insert-external-users.sql | 241 +++++++-------- .../src/main/resources/sql/schema.sql | 256 ---------------- .../icebang/DatabaseConnectionTest.java | 2 +- 20 files changed, 562 insertions(+), 589 deletions(-) rename apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/{DepartmentsCardDto.java => DepartmentCardDo.java} (87%) rename apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/{OrganizationOptionsDto.java => OrganizationOptionDto.java} (55%) rename apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/{RolesCardDto.java => RoleCardDto.java} (92%) create mode 100644 apps/user-service/src/main/resources/sql/00-drop-h2.sql create mode 100644 apps/user-service/src/main/resources/sql/00-drop-maria.sql create mode 100644 apps/user-service/src/main/resources/sql/01-schema.sql delete mode 100644 apps/user-service/src/main/resources/sql/schema.sql diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentsCardDto.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentCardDo.java similarity index 87% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentsCardDto.java rename to 
apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentCardDo.java index 5f50fabd..e891e966 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentsCardDto.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentCardDo.java @@ -9,7 +9,7 @@ @Data @Builder @AllArgsConstructor -public class DepartmentsCardDto { +public class DepartmentCardDo { private BigInteger id; private String name; } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java index ff3567b9..7375fcbe 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java @@ -11,7 +11,7 @@ import com.gltkorea.icebang.common.dto.ApiResponse; import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionsDto; +import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionDto; import com.gltkorea.icebang.domain.organization.service.OrganizationService; import lombok.RequiredArgsConstructor; @@ -28,7 +28,7 @@ public ResponseEntity>> getOrganizations() } @GetMapping("/{id}/options") - public ResponseEntity> getOrganizationDetails( + public ResponseEntity> getOrganizationDetails( @PathVariable BigInteger id) { return ResponseEntity.ok(ApiResponse.success(organizationService.getOrganizationOptions(id))); } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionsDto.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionDto.java similarity index 55% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionsDto.java rename to apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionDto.java index c416d811..d31534eb 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionsDto.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionDto.java @@ -2,9 +2,9 @@ import java.util.List; -import com.gltkorea.icebang.domain.department.dto.DepartmentsCardDto; +import com.gltkorea.icebang.domain.department.dto.DepartmentCardDo; import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RolesCardDto; +import com.gltkorea.icebang.domain.roles.dto.RoleCardDto; import lombok.AllArgsConstructor; import lombok.Builder; @@ -13,8 +13,8 @@ @Builder @Data @AllArgsConstructor -public class OrganizationOptionsDto { - List departments; +public class OrganizationOptionDto { + List departments; List positions; - List roles; + List roles; } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java index 4cebdfe5..84bcc54c 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java +++ 
b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java @@ -6,11 +6,11 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import com.gltkorea.icebang.domain.department.dto.DepartmentsCardDto; +import com.gltkorea.icebang.domain.department.dto.DepartmentCardDo; import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionsDto; +import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionDto; import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RolesCardDto; +import com.gltkorea.icebang.domain.roles.dto.RoleCardDto; import com.gltkorea.icebang.mapper.OrganizationMapper; import lombok.RequiredArgsConstructor; @@ -25,12 +25,12 @@ public List getAllOrganizationList() { return organizationMapper.findAllOrganizations(); } - public OrganizationOptionsDto getOrganizationOptions(BigInteger id) { - List departments = organizationMapper.findDepartmentsByOrganizationId(id); + public OrganizationOptionDto getOrganizationOptions(BigInteger id) { + List departments = organizationMapper.findDepartmentsByOrganizationId(id); List positions = organizationMapper.findPositionsByOrganizationId(id); - List roles = organizationMapper.findRolesByOrganizationId(id); + List roles = organizationMapper.findRolesByOrganizationId(id); - return OrganizationOptionsDto.builder() + return OrganizationOptionDto.builder() .departments(departments) .positions(positions) .roles(roles) diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RolesCardDto.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RoleCardDto.java similarity index 92% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RolesCardDto.java rename to apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RoleCardDto.java index 709a08ff..5d468be5 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RolesCardDto.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RoleCardDto.java @@ -11,7 +11,7 @@ @Builder @AllArgsConstructor @NoArgsConstructor -public class RolesCardDto { +public class RoleCardDto { private BigInteger id; private String name; private String description; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java b/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java index 2643af9f..a7624bc6 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java @@ -6,20 +6,20 @@ import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; -import com.gltkorea.icebang.domain.department.dto.DepartmentsCardDto; +import com.gltkorea.icebang.domain.department.dto.DepartmentCardDo; import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RolesCardDto; +import com.gltkorea.icebang.domain.roles.dto.RoleCardDto; @Mapper public interface OrganizationMapper { List findAllOrganizations(); - List findDepartmentsByOrganizationId( + List findDepartmentsByOrganizationId( @Param("organizationId") BigInteger 
organizationId); List findPositionsByOrganizationId( @Param("organizationId") BigInteger organizationId); - List findRolesByOrganizationId(@Param("organizationId") BigInteger organizationId); + List findRolesByOrganizationId(@Param("organizationId") BigInteger organizationId); } diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index 773a7333..6d9a8ea3 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -26,7 +26,9 @@ spring: sql: init: mode: always - schema-locations: classpath:sql/schema.sql + schema-locations: + - classpath:sql/00-drop-maria.sql + - classpath:sql/01-schema.sql data-locations: - classpath:sql/00-truncate.sql - classpath:sql/01-insert-internal-users.sql diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml b/apps/user-service/src/main/resources/application-test-e2e.yml index 7703f4a3..8759b298 100644 --- a/apps/user-service/src/main/resources/application-test-e2e.yml +++ b/apps/user-service/src/main/resources/application-test-e2e.yml @@ -6,7 +6,9 @@ spring: sql: init: mode: always - schema-locations: classpath:sql/schema.sql + schema-locations: + - classpath:sql/00-drop-maria.sql + - classpath:sql/01-schema.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-test-unit.yml b/apps/user-service/src/main/resources/application-test-unit.yml index fec65f43..4b36c77f 100644 --- a/apps/user-service/src/main/resources/application-test-unit.yml +++ b/apps/user-service/src/main/resources/application-test-unit.yml @@ -37,7 +37,9 @@ spring: sql: init: mode: always - schema-locations: classpath:sql/schema.sql + schema-locations: + - classpath:sql/00-drop-h2.sql + - classpath:sql/01-schema.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml index 0c36cc21..c503e76e 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml @@ -4,34 +4,30 @@ - - - INSERT INTO users (name, email, password) + INSERT INTO user (name, email, password) VALUES (#{name}, #{email}, #{password}); - - INSERT INTO user_organizations (user_id, organization_id, department_id, position_id, status) - VALUES (#{id}, #{orgId}, #{deptId}, #{positionId}, #{status}); + INSERT INTO user_organization (user_id, organization_id, department_id, position_id, status) + VALUES (#{id}, #{organizationId}, #{departmentId}, #{positionId}, #{status}); - - INSERT INTO user_roles (user_organization_id, role_id) + INSERT INTO user_role (user_organization_id, role_id) VALUES (#{userOrgId}, #{roleId}) - + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml index cdc403fb..6a8201b8 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml @@ -14,7 +14,7 @@ diff --git a/apps/user-service/src/main/resources/sql/00-drop-h2.sql b/apps/user-service/src/main/resources/sql/00-drop-h2.sql new file mode 100644 index 00000000..d0c7bda3 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/00-drop-h2.sql @@ -0,0 +1,6 @@ +SET FOREIGN_KEY_CHECKS = 0; 
+ +-- H2에서 모든 테이블과 객체를 삭제하는 올바른 구문 +DROP ALL OBJECTS; + +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/00-drop-maria.sql b/apps/user-service/src/main/resources/sql/00-drop-maria.sql new file mode 100644 index 00000000..d93b57b4 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/00-drop-maria.sql @@ -0,0 +1,18 @@ +SET FOREIGN_KEY_CHECKS = 0; +SET @tables = NULL; + +-- 1. 데이터베이스 내 모든 테이블 목록을 가져와 변수에 저장 +-- 백틱(`)을 사용하여 테이블 이름에 공백이나 특수 문자가 있어도 안전하게 처리합니다. +SELECT GROUP_CONCAT(CONCAT('`', table_name, '`')) INTO @tables +FROM information_schema.tables +WHERE table_schema = DATABASE(); + +-- 2. 변수 값이 NULL인 경우를 대비하여 조건문 추가 및 DROP TABLE 구문 생성 +SET @drop_tables_sql = IFNULL(CONCAT('DROP TABLE ', @tables), 'SELECT "No tables to drop";'); + +-- 3. 동적 SQL 실행 +PREPARE stmt FROM @drop_tables_sql; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/00-truncate.sql b/apps/user-service/src/main/resources/sql/00-truncate.sql index 93cbfd4a..497b6a4e 100644 --- a/apps/user-service/src/main/resources/sql/00-truncate.sql +++ b/apps/user-service/src/main/resources/sql/00-truncate.sql @@ -1,15 +1,12 @@ --- 데이터 초기화 전에 추가 -SET FOREIGN_KEY_CHECKS = 0; +-- 데이터 초기화 스크립트 (외래 키 제약조건이 없는 스키마용) --- 역순으로 TRUNCATE (참조되는 테이블을 나중에) -TRUNCATE TABLE user_roles; -TRUNCATE TABLE role_permissions; -TRUNCATE TABLE user_organizations; -TRUNCATE TABLE users; -TRUNCATE TABLE positions; -TRUNCATE TABLE departments; -TRUNCATE TABLE roles; -TRUNCATE TABLE permissions; -TRUNCATE TABLE organizations; - -SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file +-- 사용자 및 조직 관련 테이블 +TRUNCATE TABLE `user_role`; +TRUNCATE TABLE `role_permission`; +TRUNCATE TABLE `user_organization`; +TRUNCATE TABLE `user`; +TRUNCATE TABLE `position`; +TRUNCATE TABLE `department`; +TRUNCATE TABLE `role`; +TRUNCATE TABLE `permission`; +TRUNCATE TABLE `organization`; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql b/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql index 29f1f81a..3a8529c8 100644 --- a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql +++ b/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql @@ -1,29 +1,31 @@ -- icebang 내부 직원 전체 INSERT -- 1. icebang 조직 -INSERT INTO `organizations` (`name`, `domain_name`) VALUES +INSERT INTO `organization` (`name`, `domain_name`) VALUES ('icebang', 'icebang.site'); -- 2. icebang 부서들 -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'AI개발팀'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '데이터팀'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '콘텐츠팀'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '마케팅팀'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '운영팀'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '기획팀'); +SET @org_id = (SELECT id FROM organization WHERE domain_name = 'icebang.site' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@org_id, 'AI개발팀'), + (@org_id, '데이터팀'), + (@org_id, '콘텐츠팀'), + (@org_id, '마케팅팀'), + (@org_id, '운영팀'), + (@org_id, '기획팀'); -- 3. 
icebang 직책들 -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CEO'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CTO'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '팀장'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '시니어'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '주니어'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), '인턴'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@org_id, 'CEO'), + (@org_id, 'CTO'), + (@org_id, '팀장'), + (@org_id, '시니어'), + (@org_id, '주니어'), + (@org_id, '인턴'); -- 4. 바이럴 콘텐츠 워크플로우 권한들 -INSERT INTO `permissions` (`resource`, `description`) VALUES +INSERT INTO `permission` (`resource`, `description`) VALUES -- 사용자 관리 ('users.create', '사용자 생성'), ('users.read', '사용자 조회'), @@ -117,145 +119,93 @@ INSERT INTO `permissions` (`resource`, `description`) VALUES ('system.backup.restore', '시스템 백업 복원'); -- 5. 시스템 공통 역할 -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - (NULL, 'SUPER_ADMIN', '최고 관리자 - 모든 권한'), - (NULL, 'SYSTEM_ADMIN', '시스템 관리자 - 시스템 설정 및 관리'), - (NULL, 'ORG_ADMIN', '조직 관리자 - 조직 내 모든 권한'), - (NULL, 'USER', '일반 사용자 - 기본 사용 권한'), - (NULL, 'GUEST', '게스트 - 제한된 조회 권한'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (NULL, 'SUPER_ADMIN', '최고 관리자 - 모든 권한'), + (NULL, 'SYSTEM_ADMIN', '시스템 관리자 - 시스템 설정 및 관리'), + (NULL, 'ORG_ADMIN', '조직 관리자 - 조직 내 모든 권한'), + (NULL, 'USER', '일반 사용자 - 기본 사용 권한'), + (NULL, 'GUEST', '게스트 - 제한된 조회 권한'); -- 6. icebang 전용 역할 -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'AI_ENGINEER', 'AI 엔지니어 - AI 모델 개발 및 최적화'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'DATA_SCIENTIST', '데이터 사이언티스트 - 데이터 분석 및 인사이트 도출'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CRAWLING_ENGINEER', '크롤링 엔지니어 - 웹 크롤링 시스템 개발'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CONTENT_CREATOR', '콘텐츠 크리에이터 - 바이럴 콘텐츠 제작'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'CONTENT_MANAGER', '콘텐츠 매니저 - 콘텐츠 기획 및 관리'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'WORKFLOW_ADMIN', '워크플로우 관리자 - 자동화 프로세스 관리'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'MARKETING_ANALYST', '마케팅 분석가 - 마케팅 성과 분석'), - ((SELECT id FROM organizations WHERE domain_name = 'icebang.site'), 'OPERATIONS_MANAGER', '운영 매니저 - 시스템 운영 및 모니터링'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@org_id, 'AI_ENGINEER', 'AI 엔지니어 - AI 모델 개발 및 최적화'), + (@org_id, 'DATA_SCIENTIST', '데이터 사이언티스트 - 데이터 분석 및 인사이트 도출'), + (@org_id, 'CRAWLING_ENGINEER', '크롤링 엔지니어 - 웹 크롤링 시스템 개발'), + (@org_id, 'CONTENT_CREATOR', '콘텐츠 크리에이터 - 바이럴 콘텐츠 제작'), + (@org_id, 'CONTENT_MANAGER', '콘텐츠 매니저 - 콘텐츠 기획 및 관리'), + (@org_id, 'WORKFLOW_ADMIN', '워크플로우 관리자 - 자동화 프로세스 관리'), + (@org_id, 'MARKETING_ANALYST', '마케팅 분석가 - 마케팅 성과 분석'), + (@org_id, 'OPERATIONS_MANAGER', '운영 매니저 - 시스템 운영 및 모니터링'); -- 7. 
icebang 직원들 -INSERT INTO `users` (`name`, `email`, `password`, `status`) VALUES - ('김아이스', 'ice.kim@icebang.site', '$2a$10$encrypted_password_hash1', 'ACTIVE'), - ('박방방', 'bang.park@icebang.site', '$2a$10$encrypted_password_hash2', 'ACTIVE'), - ('이트렌드', 'trend.lee@icebang.site', '$2a$10$encrypted_password_hash3', 'ACTIVE'), - ('정바이럴', 'viral.jung@icebang.site', '$2a$10$encrypted_password_hash4', 'ACTIVE'), - ('최콘텐츠', 'content.choi@icebang.site', '$2a$10$encrypted_password_hash5', 'ACTIVE'), - ('홍크롤러', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), - ('서데이터', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), - ('윤워크플로', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), - ('시스템관리자', 'admin@icebang.site', '$2a$10$encrypted_password_hash0', 'ACTIVE'); +INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES + ('김아이스', 'ice.kim@icebang.site', '$2a$10$encrypted_password_hash1', 'ACTIVE'), + ('박방방', 'bang.park@icebang.site', '$2a$10$encrypted_password_hash2', 'ACTIVE'), + ('이트렌드', 'trend.lee@icebang.site', '$2a$10$encrypted_password_hash3', 'ACTIVE'), + ('정바이럴', 'viral.jung@icebang.site', '$2a$10$encrypted_password_hash4', 'ACTIVE'), + ('최콘텐츠', 'content.choi@icebang.site', '$2a$10$encrypted_password_hash5', 'ACTIVE'), + ('홍크롤러', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), + ('서데이터', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), + ('윤워크플로', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), + ('시스템관리자', 'admin@icebang.site', '$2a$10$encrypted_password_hash0', 'ACTIVE'); -- 8. icebang 직원-조직 연결 -INSERT INTO `user_organizations` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES --- 김아이스 - CEO, 기획팀 -((SELECT id FROM users WHERE email = 'ice.kim@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'CEO' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '기획팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'PLN25001', 'ACTIVE'), - --- 박방방 - CTO, AI개발팀 -((SELECT id FROM users WHERE email = 'bang.park@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'CTO' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = 'AI개발팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'AI25001', 'ACTIVE'), - --- 이트렌드 - 팀장, 데이터팀 -((SELECT id FROM users WHERE email = 'trend.lee@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '팀장' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '데이터팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'DAT25001', 'ACTIVE'), - --- 정바이럴 - 팀장, 콘텐츠팀 -((SELECT id FROM users WHERE email = 'viral.jung@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '팀장' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '콘텐츠팀' AND organization_id = (SELECT 
id FROM organizations WHERE domain_name = 'icebang.site')), - 'CON25001', 'ACTIVE'), - -((SELECT id FROM users WHERE email = 'content.choi@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '시니어' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '콘텐츠팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'CON25002', 'ACTIVE'), - --- 홍크롤러 - 시니어, AI개발팀 -((SELECT id FROM users WHERE email = 'crawler.hong@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '시니어' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = 'AI개발팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'AI25002', 'ACTIVE'), - --- 서데이터 - 시니어, 데이터팀 -((SELECT id FROM users WHERE email = 'data.seo@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '시니어' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '데이터팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'DAT25002', 'ACTIVE'), - --- 윤워크플로 - 팀장, 운영팀 -((SELECT id FROM users WHERE email = 'workflow.yoon@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = '팀장' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '운영팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'OPS25001', 'ACTIVE'), - --- 시스템관리자 - CTO, 운영팀 -((SELECT id FROM users WHERE email = 'admin@icebang.site'), - (SELECT id FROM organizations WHERE domain_name = 'icebang.site'), - (SELECT id FROM positions WHERE title = 'CTO' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - (SELECT id FROM departments WHERE name = '운영팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), - 'OPS25000', 'ACTIVE'); +INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES + ((SELECT id FROM user WHERE email = 'ice.kim@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'CEO' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '기획팀' AND organization_id = @org_id), 'PLN25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'bang.park@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'CTO' AND organization_id = @org_id), (SELECT id FROM department WHERE name = 'AI개발팀' AND organization_id = @org_id), 'AI25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'trend.lee@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '팀장' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '데이터팀' AND organization_id = @org_id), 'DAT25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'viral.jung@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '팀장' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '콘텐츠팀' AND organization_id = @org_id), 'CON25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 
'content.choi@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '시니어' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '콘텐츠팀' AND organization_id = @org_id), 'CON25002', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'crawler.hong@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '시니어' AND organization_id = @org_id), (SELECT id FROM department WHERE name = 'AI개발팀' AND organization_id = @org_id), 'AI25002', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'data.seo@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '시니어' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '데이터팀' AND organization_id = @org_id), 'DAT25002', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'workflow.yoon@icebang.site'), @org_id, (SELECT id FROM position WHERE title = '팀장' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '운영팀' AND organization_id = @org_id), 'OPS25001', 'ACTIVE'), + ((SELECT id FROM user WHERE email = 'admin@icebang.site'), @org_id, (SELECT id FROM position WHERE title = 'CTO' AND organization_id = @org_id), (SELECT id FROM department WHERE name = '운영팀' AND organization_id = @org_id), 'OPS25000', 'ACTIVE'); -- 9. 역할별 권한 할당 -- SUPER_ADMIN 모든 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'SUPER_ADMIN'), + (SELECT id FROM role WHERE name = 'SUPER_ADMIN'), id -FROM permissions; +FROM permission; -- ORG_ADMIN 조직 내 모든 권한 (시스템 권한 제외) -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'ORG_ADMIN'), + (SELECT id FROM role WHERE name = 'ORG_ADMIN'), id -FROM permissions +FROM permission WHERE resource NOT LIKE 'system.%'; -- AI_ENGINEER 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'AI_ENGINEER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'AI_ENGINEER' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'ai.%' OR resource LIKE 'crawling.%' OR resource LIKE 'workflows.%' OR resource IN ('content.read', 'trends.read', 'analytics.read'); -- DATA_SCIENTIST 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'DATA_SCIENTIST' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'DATA_SCIENTIST' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'trends.%' OR resource LIKE 'analytics.%' OR resource LIKE 'reports.%' OR resource IN ('content.read', 'campaigns.read', 'crawling.read'); -- CONTENT_MANAGER 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'CONTENT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'CONTENT_MANAGER' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'content.%' OR resource LIKE 'campaigns.%' OR resource LIKE 'trends.%' @@ -263,11 +213,11 @@ WHERE 
resource LIKE 'content.%' OR resource IN ('users.read.department'); -- WORKFLOW_ADMIN 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'WORKFLOW_ADMIN' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'WORKFLOW_ADMIN' AND organization_id = @org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'workflows.%' OR resource LIKE 'ai.%' OR resource LIKE 'crawling.%' @@ -277,54 +227,54 @@ WHERE resource LIKE 'workflows.%' -- 10. icebang 직원별 역할 할당 -- 김아이스(CEO) - ORG_ADMIN -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'ORG_ADMIN'), + (SELECT id FROM role WHERE name = 'ORG_ADMIN'), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'ice.kim@icebang.site'; -- 박방방(CTO) - AI_ENGINEER + WORKFLOW_ADMIN -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'AI_ENGINEER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'AI_ENGINEER' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'bang.park@icebang.site'; -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'WORKFLOW_ADMIN' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'WORKFLOW_ADMIN' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'bang.park@icebang.site'; -- 정바이럴(콘텐츠팀장) - CONTENT_MANAGER -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'CONTENT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'CONTENT_MANAGER' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'viral.jung@icebang.site'; -- 이트렌드(데이터팀장) - DATA_SCIENTIST -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'DATA_SCIENTIST' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'icebang.site')), + (SELECT id FROM role WHERE name = 'DATA_SCIENTIST' AND organization_id = @org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'trend.lee@icebang.site'; -- 시스템관리자 - SUPER_ADMIN -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'SUPER_ADMIN'), + (SELECT id FROM role WHERE 
name = 'SUPER_ADMIN'), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'admin@icebang.site'; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql new file mode 100644 index 00000000..138425fc --- /dev/null +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -0,0 +1,289 @@ +-- MariaDB 최적화된 스키마 (단수형 테이블 네이밍, 외래 키 제약조건 제거 버전) +CREATE TABLE `permission` ( + `id` int unsigned NOT NULL AUTO_INCREMENT, + `resource` varchar(100) NULL, + `description` varchar(255) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `is_active` boolean DEFAULT TRUE, + `updated_by` bigint unsigned NULL, + `created_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `organization` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(150) NULL, + `domain_name` varchar(100) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `role` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `organization_id` bigint unsigned NULL, + `name` varchar(100) NULL, + `description` varchar(500) NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `user` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(50) NULL, + `email` varchar(100) NULL, + `password` varchar(255) NULL, + `status` varchar(20) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `department` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `organization_id` bigint unsigned NOT NULL, + `name` varchar(100) NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `position` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `organization_id` bigint unsigned NOT NULL, + `title` varchar(100) NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `user_organization` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `user_id` bigint unsigned NOT NULL, + `organization_id` bigint unsigned NOT NULL, + `position_id` bigint unsigned NOT NULL, + `department_id` bigint unsigned NOT NULL, + `employee_number` varchar(50) NULL, + `status` varchar(20) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `role_permission` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `role_id` bigint unsigned NOT NULL, + `permission_id` int unsigned NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_role_permission` (`role_id`, `permission_id`) + ); + +CREATE TABLE `user_role` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `role_id` bigint unsigned NOT NULL, + `user_organization_id` bigint unsigned NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_user_role` (`role_id`, `user_organization_id`) + ); + +-- 성능 최적화를 위한 인덱스 +CREATE INDEX `idx_user_email` ON `user` (`email`); +CREATE INDEX `idx_user_status` ON `user` (`status`); +CREATE INDEX `idx_user_organization_user` ON `user_organization` (`user_id`); +CREATE INDEX `idx_user_organization_org` ON `user_organization` (`organization_id`); +CREATE INDEX `idx_user_organization_status` ON `user_organization` (`status`); +CREATE INDEX `idx_role_org` ON 
`role` (`organization_id`); +CREATE INDEX `idx_permission_resource` ON `permission` (`resource`); +CREATE INDEX `idx_permission_active` ON `permission` (`is_active`); + + + +CREATE TABLE `workflow` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `description` text NULL, + `is_enabled` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `schedule` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_id` bigint unsigned NOT NULL, + `cron_expression` varchar(50) NULL, + `parameters` json NULL, + `is_active` boolean DEFAULT TRUE, + `last_run_status` varchar(20) NULL, + `last_run_at` timestamp NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `job` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `description` text NULL, + `is_enabled` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint unsigned NULL, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `task` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `type` varchar(50) NULL, + `parameters` json NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `workflow_job` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_id` bigint unsigned NOT NULL, + `job_id` bigint unsigned NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_workflow_job` (`workflow_id`, `job_id`) + ); + +CREATE TABLE `job_task` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `job_id` bigint unsigned NOT NULL, + `task_id` bigint unsigned NOT NULL, + `execution_order` int NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_job_task` (`job_id`, `task_id`) + ); + +CREATE TABLE `execution_log` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `execution_type` varchar(20) NULL COMMENT 'task, schedule, job, workflow', + `source_id` bigint unsigned NULL COMMENT '모든 데이터에 대한 ID ex: job_id, schedule_id, task_id, ...', + `log_level` varchar(20) NULL, + `executed_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `log_message` text NULL, + `trace_id` char(36) NULL, + `config_snapshot` json NULL, + PRIMARY KEY (`id`), + INDEX `idx_source_id_type` (`source_id`, `execution_type`) + ); + +CREATE TABLE `task_io_data` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `task_run_id` bigint unsigned NOT NULL, + `io_type` varchar(10) NOT NULL COMMENT 'INPUT, OUTPUT', + `name` varchar(100) NOT NULL COMMENT '파라미터/변수 이름', + `data_type` varchar(50) NOT NULL COMMENT 'string, number, json, file, etc', + `data_value` json NULL COMMENT '실제 데이터 값', + `data_size` bigint NULL COMMENT '데이터 크기 (bytes)', + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_task_io_task_run_id` (`task_run_id`), + INDEX `idx_task_io_type` (`io_type`), + INDEX `idx_task_io_name` (`name`) + ); + +CREATE TABLE `config` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + 
`target_type` varchar(50) NULL COMMENT 'user, job, workflow', + `target_id` bigint unsigned NULL, + `version` int NULL, + `json` json NULL, + `is_active` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint unsigned NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_config_target` (`target_type`, `target_id`) + ); + +CREATE TABLE `category` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(100) NULL, + `description` text NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +CREATE TABLE `user_config` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `user_id` bigint unsigned NOT NULL, + `type` varchar(50) NULL, + `name` varchar(100) NULL, + `json` json NULL, + `is_active` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ); + +-- 인덱스 추가 (성능 최적화) +CREATE INDEX `idx_schedule_workflow` ON `schedule` (`workflow_id`); +CREATE INDEX `idx_job_enabled` ON `job` (`is_enabled`); +CREATE INDEX `idx_task_type` ON `task` (`type`); +CREATE INDEX `idx_workflow_enabled` ON `workflow` (`is_enabled`); +CREATE UNIQUE INDEX `uk_schedule_workflow` ON `schedule` (`workflow_id`); +CREATE UNIQUE INDEX `uk_job_name` ON `job` (`name`); +CREATE UNIQUE INDEX `uk_task_name` ON `task` (`name`); +CREATE UNIQUE INDEX `uk_workflow_name` ON `workflow` (`name`); +CREATE INDEX `idx_user_config_user` ON `user_config` (`user_id`); + + + +-- 워크플로우 실행 테이블 +CREATE TABLE `workflow_run` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_id` bigint unsigned NOT NULL, + `trace_id` char(36) NOT NULL, + `run_number` varchar(20) NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled', + `trigger_type` varchar(20) NULL COMMENT 'manual, schedule, push, pull_request', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `created_by` bigint unsigned NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_workflow_run_trace` (`trace_id`), + INDEX `idx_workflow_run_status` (`status`), + INDEX `idx_workflow_run_workflow_id` (`workflow_id`), + INDEX `idx_workflow_run_created_at` (`created_at`) + ); + +-- Job 실행 테이블 +CREATE TABLE `job_run` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `workflow_run_id` bigint unsigned NOT NULL, + `job_id` bigint unsigned NOT NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `execution_order` int NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_job_run_workflow_run_id` (`workflow_run_id`), + INDEX `idx_job_run_status` (`status`), + INDEX `idx_job_run_job_id` (`job_id`) + ); + +-- Task 실행 테이블 +CREATE TABLE `task_run` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT, + `job_run_id` bigint unsigned NOT NULL, + `task_id` bigint unsigned NOT NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `execution_order` int NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_task_run_job_run_id` (`job_run_id`), + INDEX `idx_task_run_status` (`status`), + INDEX `idx_task_run_task_id` (`task_id`) + ); + +CREATE INDEX 
`idx_task_io_data_task_run_id` ON `task_io_data` (`task_run_id`); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/02-insert-external-users.sql b/apps/user-service/src/main/resources/sql/02-insert-external-users.sql index f4620bbd..b38f2c47 100644 --- a/apps/user-service/src/main/resources/sql/02-insert-external-users.sql +++ b/apps/user-service/src/main/resources/sql/02-insert-external-users.sql @@ -1,86 +1,92 @@ -- B2B 테스트용 외부 회사 INSERT -- 1. 외부 테스트 회사들 -INSERT INTO `organizations` (`name`, `domain_name`) VALUES - ('테크이노베이션', 'techinnovation.co.kr'), - ('디지털솔루션', 'digitalsolution.com'), - ('크리에이티브웍스', 'creativeworks.net'); +INSERT INTO `organization` (`name`, `domain_name`) VALUES + ('테크이노베이션', 'techinnovation.co.kr'), + ('디지털솔루션', 'digitalsolution.com'), + ('크리에이티브웍스', 'creativeworks.net'); -- 2. 테크이노베이션 부서들 -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '개발팀'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '디자인팀'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '인사팀'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '마케팅팀'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '영업팀'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '재무팀'); +SET @tech_org_id = (SELECT id FROM organization WHERE domain_name = 'techinnovation.co.kr' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@tech_org_id, '개발팀'), + (@tech_org_id, '디자인팀'), + (@tech_org_id, '인사팀'), + (@tech_org_id, '마케팅팀'), + (@tech_org_id, '영업팀'), + (@tech_org_id, '재무팀'); -- 3. 디지털솔루션 부서들 -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '개발팀'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '기획팀'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '운영팀'); +SET @digital_org_id = (SELECT id FROM organization WHERE domain_name = 'digitalsolution.com' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@digital_org_id, '개발팀'), + (@digital_org_id, '기획팀'), + (@digital_org_id, '운영팀'); -- 4. 크리에이티브웍스 부서들 -INSERT INTO `departments` (`organization_id`, `name`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '디자인팀'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '마케팅팀'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '제작팀'); +SET @creative_org_id = (SELECT id FROM organization WHERE domain_name = 'creativeworks.net' LIMIT 1); + +INSERT INTO `department` (`organization_id`, `name`) VALUES + (@creative_org_id, '디자인팀'), + (@creative_org_id, '마케팅팀'), + (@creative_org_id, '제작팀'); -- 5. 
테크이노베이션 직책들 -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '사원'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '주임'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '대리'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '과장'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '차장'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '부장'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), '이사'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@tech_org_id, '사원'), + (@tech_org_id, '주임'), + (@tech_org_id, '대리'), + (@tech_org_id, '과장'), + (@tech_org_id, '차장'), + (@tech_org_id, '부장'), + (@tech_org_id, '이사'); -- 6. 디지털솔루션 직책들 -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '사원'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '선임'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '책임'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '수석'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '팀장'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), '본부장'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@digital_org_id, '사원'), + (@digital_org_id, '선임'), + (@digital_org_id, '책임'), + (@digital_org_id, '수석'), + (@digital_org_id, '팀장'), + (@digital_org_id, '본부장'); -- 7. 크리에이티브웍스 직책들 -INSERT INTO `positions` (`organization_id`, `title`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '주니어'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '시니어'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '리드'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), '디렉터'); +INSERT INTO `position` (`organization_id`, `title`) VALUES + (@creative_org_id, '주니어'), + (@creative_org_id, '시니어'), + (@creative_org_id, '리드'), + (@creative_org_id, '디렉터'); -- 8. 
외부 회사별 커스텀 역할 -- 테크이노베이션 역할 -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'DEPT_MANAGER', '부서 관리자 - 부서 내 관리 권한'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'TEAM_LEAD', '팀장 - 팀원 관리 및 프로젝트 리드'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'SENIOR_DEV', '시니어 개발자 - 개발 관련 고급 권한'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'JUNIOR_DEV', '주니어 개발자 - 개발 관련 기본 권한'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'PROJECT_MANAGER', '프로젝트 매니저 - 프로젝트 관리 권한'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'DESIGNER', '디자이너 - 디자인 관련 권한'), - ((SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), 'HR_SPECIALIST', '인사 담당자 - 인사 관리 권한'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@tech_org_id, 'DEPT_MANAGER', '부서 관리자 - 부서 내 관리 권한'), + (@tech_org_id, 'TEAM_LEAD', '팀장 - 팀원 관리 및 프로젝트 리드'), + (@tech_org_id, 'SENIOR_DEV', '시니어 개발자 - 개발 관련 고급 권한'), + (@tech_org_id, 'JUNIOR_DEV', '주니어 개발자 - 개발 관련 기본 권한'), + (@tech_org_id, 'PROJECT_MANAGER', '프로젝트 매니저 - 프로젝트 관리 권한'), + (@tech_org_id, 'DESIGNER', '디자이너 - 디자인 관련 권한'), + (@tech_org_id, 'HR_SPECIALIST', '인사 담당자 - 인사 관리 권한'); -- 디지털솔루션 역할 -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'TECH_LEAD', '기술 리드 - 기술 관련 총괄'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'PRODUCT_OWNER', '프로덕트 오너 - 제품 기획 관리'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'QA_ENGINEER', 'QA 엔지니어 - 품질 보증'), - ((SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), 'DEVOPS', 'DevOps 엔지니어 - 인프라 관리'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@digital_org_id, 'TECH_LEAD', '기술 리드 - 기술 관련 총괄'), + (@digital_org_id, 'PRODUCT_OWNER', '프로덕트 오너 - 제품 기획 관리'), + (@digital_org_id, 'QA_ENGINEER', 'QA 엔지니어 - 품질 보증'), + (@digital_org_id, 'DEVOPS', 'DevOps 엔지니어 - 인프라 관리'); -- 크리에이티브웍스 역할 -INSERT INTO `roles` (`organization_id`, `name`, `description`) VALUES - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'CREATIVE_DIRECTOR', '크리에이티브 디렉터 - 창작 총괄'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'ART_DIRECTOR', '아트 디렉터 - 예술 감독'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'MOTION_DESIGNER', '모션 디자이너 - 영상/애니메이션'), - ((SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), 'COPYWRITER', '카피라이터 - 콘텐츠 작성'); +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES + (@creative_org_id, 'CREATIVE_DIRECTOR', '크리에이티브 디렉터 - 창작 총괄'), + (@creative_org_id, 'ART_DIRECTOR', '아트 디렉터 - 예술 감독'), + (@creative_org_id, 'MOTION_DESIGNER', '모션 디자이너 - 영상/애니메이션'), + (@creative_org_id, 'COPYWRITER', '카피라이터 - 콘텐츠 작성'); -- 9. 
외부 회사 테스트 사용자들 -INSERT INTO `users` (`name`, `email`, `password`, `status`) VALUES +INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES -- 테크이노베이션 직원 ('김철수', 'chulsoo.kim@techinnovation.co.kr', '$2a$10$encrypted_password_hash11', 'ACTIVE'), ('이영희', 'younghee.lee@techinnovation.co.kr', '$2a$10$encrypted_password_hash12', 'ACTIVE'), @@ -94,119 +100,80 @@ INSERT INTO `users` (`name`, `email`, `password`, `status`) VALUES ('홍지아', 'jia.hong@creativeworks.net', '$2a$10$encrypted_password_hash16', 'ACTIVE'); -- 10. 외부 회사 사용자-조직 연결 -INSERT INTO `user_organizations` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES +INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES -- 테크이노베이션 직원들 --- 김철수 - 개발팀 과장 -((SELECT id FROM users WHERE email = 'chulsoo.kim@techinnovation.co.kr'), - (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), - (SELECT id FROM positions WHERE title = '과장' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - (SELECT id FROM departments WHERE name = '개발팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - 'DEV25001', 'ACTIVE'), - --- 이영희 - 디자인팀 대리 -((SELECT id FROM users WHERE email = 'younghee.lee@techinnovation.co.kr'), - (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), - (SELECT id FROM positions WHERE title = '대리' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - (SELECT id FROM departments WHERE name = '디자인팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - 'DES25001', 'ACTIVE'), - --- 박민수 - 인사팀 차장 -((SELECT id FROM users WHERE email = 'minsu.park@techinnovation.co.kr'), - (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr'), - (SELECT id FROM positions WHERE title = '차장' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - (SELECT id FROM departments WHERE name = '인사팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), - 'HR25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'chulsoo.kim@techinnovation.co.kr'), @tech_org_id, (SELECT id FROM position WHERE title = '과장' AND organization_id = @tech_org_id), (SELECT id FROM department WHERE name = '개발팀' AND organization_id = @tech_org_id), 'DEV25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'younghee.lee@techinnovation.co.kr'), @tech_org_id, (SELECT id FROM position WHERE title = '대리' AND organization_id = @tech_org_id), (SELECT id FROM department WHERE name = '디자인팀' AND organization_id = @tech_org_id), 'DES25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'minsu.park@techinnovation.co.kr'), @tech_org_id, (SELECT id FROM position WHERE title = '차장' AND organization_id = @tech_org_id), (SELECT id FROM department WHERE name = '인사팀' AND organization_id = @tech_org_id), 'HR25001', 'ACTIVE'), -- 디지털솔루션 직원들 --- 정수연 - 개발팀 팀장 -((SELECT id FROM users WHERE email = 'sooyeon.jung@digitalsolution.com'), - (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), - (SELECT id FROM positions WHERE title = '팀장' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - (SELECT id FROM departments WHERE name = '개발팀' AND organization_id = (SELECT id FROM organizations WHERE 
domain_name = 'digitalsolution.com')), - 'DEV25001', 'ACTIVE'), - --- 최현우 - 기획팀 책임 -((SELECT id FROM users WHERE email = 'hyunwoo.choi@digitalsolution.com'), - (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com'), - (SELECT id FROM positions WHERE title = '책임' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - (SELECT id FROM departments WHERE name = '기획팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), - 'PLN25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'sooyeon.jung@digitalsolution.com'), @digital_org_id, (SELECT id FROM position WHERE title = '팀장' AND organization_id = @digital_org_id), (SELECT id FROM department WHERE name = '개발팀' AND organization_id = @digital_org_id), 'DEV25001', 'ACTIVE'), +((SELECT id FROM user WHERE email = 'hyunwoo.choi@digitalsolution.com'), @digital_org_id, (SELECT id FROM position WHERE title = '책임' AND organization_id = @digital_org_id), (SELECT id FROM department WHERE name = '기획팀' AND organization_id = @digital_org_id), 'PLN25001', 'ACTIVE'), -- 크리에이티브웍스 직원 --- 홍지아 - 디자인팀 리드 -((SELECT id FROM users WHERE email = 'jia.hong@creativeworks.net'), - (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net'), - (SELECT id FROM positions WHERE title = '리드' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), - (SELECT id FROM departments WHERE name = '디자인팀' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), - 'DES25001', 'ACTIVE'); +((SELECT id FROM user WHERE email = 'jia.hong@creativeworks.net'), @creative_org_id, (SELECT id FROM position WHERE title = '리드' AND organization_id = @creative_org_id), (SELECT id FROM department WHERE name = '디자인팀' AND organization_id = @creative_org_id), 'DES25001', 'ACTIVE'); -- 11. 
외부 회사 사용자별 역할 할당 -- 테크이노베이션 --- 김철수에게 DEPT_MANAGER 역할 -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'DEPT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'DEPT_MANAGER' AND organization_id = @tech_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'chulsoo.kim@techinnovation.co.kr'; --- 이영희에게 DESIGNER 역할 -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'DESIGNER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'DESIGNER' AND organization_id = @tech_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'younghee.lee@techinnovation.co.kr'; --- 박민수에게 HR_SPECIALIST 역할 -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'HR_SPECIALIST' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'HR_SPECIALIST' AND organization_id = @tech_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'minsu.park@techinnovation.co.kr'; -- 디지털솔루션 --- 정수연에게 TECH_LEAD 역할 -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'TECH_LEAD' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), + (SELECT id FROM role WHERE name = 'TECH_LEAD' AND organization_id = @digital_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'sooyeon.jung@digitalsolution.com'; --- 최현우에게 PRODUCT_OWNER 역할 -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'PRODUCT_OWNER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), + (SELECT id FROM role WHERE name = 'PRODUCT_OWNER' AND organization_id = @digital_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'hyunwoo.choi@digitalsolution.com'; -- 크리에이티브웍스 --- 홍지아에게 CREATIVE_DIRECTOR 역할 -INSERT INTO `user_roles` (`role_id`, `user_organization_id`) +INSERT INTO `user_role` (`role_id`, `user_organization_id`) SELECT - (SELECT id FROM roles WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), + (SELECT id FROM role WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = @creative_org_id), uo.id -FROM user_organizations uo - JOIN users u ON u.id = uo.user_id +FROM user_organization uo + JOIN user u ON u.id = uo.user_id WHERE u.email = 'jia.hong@creativeworks.net'; -- 12. 
외부 회사 역할별 기본 권한 할당 (샘플) -- DEPT_MANAGER 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'DEPT_MANAGER' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'techinnovation.co.kr')), + (SELECT id FROM role WHERE name = 'DEPT_MANAGER' AND organization_id = @tech_org_id), id -FROM permissions +FROM permission WHERE resource IN ( 'users.read.department', 'users.update', 'users.invite', 'departments.read', 'departments.manage', @@ -216,11 +183,11 @@ WHERE resource IN ( ); -- TECH_LEAD 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'TECH_LEAD' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'digitalsolution.com')), + (SELECT id FROM role WHERE name = 'TECH_LEAD' AND organization_id = @digital_org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'ai.%' OR resource LIKE 'workflows.%' OR resource IN ( @@ -230,11 +197,11 @@ WHERE resource LIKE 'ai.%' ); -- CREATIVE_DIRECTOR 권한 -INSERT INTO `role_permissions` (`role_id`, `permission_id`) +INSERT INTO `role_permission` (`role_id`, `permission_id`) SELECT - (SELECT id FROM roles WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = (SELECT id FROM organizations WHERE domain_name = 'creativeworks.net')), + (SELECT id FROM role WHERE name = 'CREATIVE_DIRECTOR' AND organization_id = @creative_org_id), id -FROM permissions +FROM permission WHERE resource LIKE 'content.%' OR resource LIKE 'campaigns.%' OR resource IN ( diff --git a/apps/user-service/src/main/resources/sql/schema.sql b/apps/user-service/src/main/resources/sql/schema.sql deleted file mode 100644 index e2a9a917..00000000 --- a/apps/user-service/src/main/resources/sql/schema.sql +++ /dev/null @@ -1,256 +0,0 @@ --- MariaDB 최적화된 스키마 (소문자, VARCHAR 크기 지정) -CREATE TABLE IF NOT EXISTS `permissions` ( - `id` int unsigned NOT NULL AUTO_INCREMENT, - `resource` varchar(100) NULL, - `description` varchar(255) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `is_active` boolean DEFAULT TRUE, - `updated_by` bigint unsigned NULL, - `created_by` bigint unsigned NULL, - PRIMARY KEY (`id`) -); - -CREATE TABLE IF NOT EXISTS `organizations` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(150) NULL, - `domain_name` varchar(100) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) -); - -CREATE TABLE IF NOT EXISTS `roles` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `organization_id` bigint unsigned NULL, - `name` varchar(100) NULL, - `description` varchar(500) NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_organizations_to_roles` FOREIGN KEY (`organization_id`) - REFERENCES `organizations` (`id`) ON DELETE SET NULL -); - -CREATE TABLE IF NOT EXISTS `users` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(50) NULL, - `email` varchar(100) NULL, - `password` varchar(255) NULL, - `status` varchar(20) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) -); - -CREATE TABLE IF NOT EXISTS `departments` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `organization_id` bigint unsigned 
NOT NULL, - `name` varchar(100) NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_organizations_to_departments` FOREIGN KEY (`organization_id`) REFERENCES `organizations` (`id`) -); - -CREATE TABLE IF NOT EXISTS `positions` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `organization_id` bigint unsigned NOT NULL, - `title` varchar(100) NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_organizations_to_positions` FOREIGN KEY (`organization_id`) REFERENCES `organizations` (`id`) -); - -CREATE TABLE IF NOT EXISTS `user_organizations` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `user_id` bigint unsigned NOT NULL, - `organization_id` bigint unsigned NOT NULL, - `position_id` bigint unsigned NOT NULL, - `department_id` bigint unsigned NOT NULL, - `employee_number` varchar(50) NULL, - `status` varchar(20) NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - CONSTRAINT `fk_users_to_user_organizations` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`), - CONSTRAINT `fk_organizations_to_user_organizations` FOREIGN KEY (`organization_id`) REFERENCES `organizations` (`id`), - CONSTRAINT `fk_positions_to_user_organizations` FOREIGN KEY (`position_id`) REFERENCES `positions` (`id`), - CONSTRAINT `fk_departments_to_user_organizations` FOREIGN KEY (`department_id`) REFERENCES `departments` (`id`) -); - -CREATE TABLE IF NOT EXISTS `role_permissions` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `role_id` bigint unsigned NOT NULL, - `permission_id` int unsigned NOT NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_roles_to_role_permissions` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`), - CONSTRAINT `fk_permissions_to_role_permissions` FOREIGN KEY (`permission_id`) REFERENCES `permissions` (`id`), - UNIQUE KEY `uk_role_permission` (`role_id`, `permission_id`) -); - -CREATE TABLE IF NOT EXISTS `user_roles` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `role_id` bigint unsigned NOT NULL, - `user_organization_id` bigint unsigned NOT NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_roles_to_user_roles` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`), - CONSTRAINT `fk_user_organizations_to_user_roles` FOREIGN KEY (`user_organization_id`) REFERENCES `user_organizations` (`id`), - UNIQUE KEY `uk_user_role` (`role_id`, `user_organization_id`) -); - --- 성능 최적화를 위한 인덱스 -CREATE INDEX IF NOT EXISTS - `idx_users_email` ON `users` (`email`); -CREATE INDEX IF NOT EXISTS - `idx_users_status` ON `users` (`status`); -CREATE INDEX IF NOT EXISTS - `idx_user_organizations_user` ON `user_organizations` (`user_id`); -CREATE INDEX IF NOT EXISTS - `idx_user_organizations_org` ON `user_organizations` (`organization_id`); -CREATE INDEX IF NOT EXISTS - `idx_user_organizations_status` ON `user_organizations` (`status`); -CREATE INDEX IF NOT EXISTS - `idx_roles_org` ON `roles` (`organization_id`); -CREATE INDEX IF NOT EXISTS - `idx_permissions_resource` ON `permissions` (`resource`); -CREATE INDEX IF NOT EXISTS - `idx_permissions_active` ON `permissions` (`is_active`); - - - -CREATE TABLE IF NOT EXISTS `workflows` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL UNIQUE, - `description` text NULL, - `is_enabled` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `updated_by` bigint unsigned NULL, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT 
EXISTS `schedules` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `workflow_id` bigint unsigned NOT NULL, - `cron_expression` varchar(50) NULL, - `parameters` json NULL, - `is_active` boolean DEFAULT TRUE, - `last_run_status` varchar(20) NULL, - `last_run_at` timestamp NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `updated_by` bigint unsigned NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_schedules_to_workflows` FOREIGN KEY (`workflow_id`) REFERENCES `workflows` (`id`) - ); - -CREATE TABLE IF NOT EXISTS `jobs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL UNIQUE, - `description` text NULL, - `is_enabled` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `updated_by` bigint unsigned NULL, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `tasks` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NOT NULL UNIQUE, - `type` varchar(50) NULL, - `parameters` json NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `workflow_jobs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `workflow_id` bigint unsigned NOT NULL, - `job_id` bigint unsigned NOT NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_workflow_jobs_to_workflows` FOREIGN KEY (`workflow_id`) REFERENCES `workflows` (`id`), - CONSTRAINT `fk_workflow_jobs_to_jobs` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`), - UNIQUE KEY `uk_workflow_job` (`workflow_id`, `job_id`) - ); - -CREATE TABLE IF NOT EXISTS `job_tasks` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `job_id` bigint unsigned NOT NULL, - `task_id` bigint unsigned NOT NULL, - `execution_order` int NULL, - PRIMARY KEY (`id`), - CONSTRAINT `fk_job_tasks_to_jobs` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`), - CONSTRAINT `fk_job_tasks_to_tasks` FOREIGN KEY (`task_id`) REFERENCES `tasks` (`id`), - UNIQUE KEY `uk_job_task` (`job_id`, `task_id`) - ); - -CREATE TABLE IF NOT EXISTS `execution_logs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `execution_type` varchar(20) NULL COMMENT 'task, schedule, job, workflow', - `source_id` bigint unsigned NULL COMMENT '모든 데이터에 대한 ID ex: job_id, schedule_id, task_id, ...', - `log_level` varchar(20) NULL, - `executed_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `log_message` text NULL, - `trace_id` char(36) NULL, - `config_snapshot` json NULL, - PRIMARY KEY (`id`), - INDEX `idx_source_id_type` (`source_id`, `execution_type`) - ); - -CREATE TABLE IF NOT EXISTS `task_io_data` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `trace_id` char(36) NULL, - `io_type` varchar(10) NULL COMMENT 'INPUT, OUTPUT', - `name` varchar(100) NULL, - `data_type` varchar(50) NULL, - `data_value` json NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - INDEX `idx_trace_id` (`trace_id`) - ); - -CREATE TABLE IF NOT EXISTS `configs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `target_type` varchar(50) NULL COMMENT 'user, job, workflow', - `target_id` bigint unsigned NULL, - `version` int NULL, - `json` json NULL, - `is_active` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `created_by` bigint unsigned NULL, - PRIMARY KEY 
(`id`), - UNIQUE KEY `uk_config_target` (`target_type`, `target_id`) - ); - -CREATE TABLE IF NOT EXISTS `categories` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `name` varchar(100) NULL, - `description` text NULL, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) - ); - -CREATE TABLE IF NOT EXISTS `user_configs` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `user_id` bigint unsigned NOT NULL, - `type` varchar(50) NULL, - `name` varchar(100) NULL, - `json` json NULL, - `is_active` boolean DEFAULT TRUE, - `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, - `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`) - ); - --- 인덱스 추가 (성능 최적화) -CREATE INDEX IF NOT EXISTS `idx_schedules_workflow` ON `schedules` (`workflow_id`); -CREATE INDEX IF NOT EXISTS `idx_jobs_enabled` ON `jobs` (`is_enabled`); -CREATE INDEX IF NOT EXISTS `idx_tasks_type` ON `tasks` (`type`); -CREATE INDEX IF NOT EXISTS `idx_workflows_enabled` ON `workflows` (`is_enabled`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_schedules_workflow` ON `schedules` (`workflow_id`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_job_name` ON `jobs` (`name`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_task_name` ON `tasks` (`name`); -CREATE UNIQUE INDEX IF NOT EXISTS `uk_workflows_name` ON `workflows` (`name`); -CREATE INDEX IF NOT EXISTS `idx_user_configs_user` ON `user_configs` (`user_id`); \ No newline at end of file diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java index e744873b..c15170cc 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java @@ -28,7 +28,7 @@ // @ActiveProfiles("test") // application-test-unit.yml 설정을 활성화 // @Transactional // 테스트 후 데이터 롤백 // @Sql( -// scripts = {"classpath:sql/create-schema.sql", "classpath:sql/insert-user-data.sql"}, +// scripts = {"classpath:sql/create-01-schema.sql", "classpath:sql/insert-user-data.sql"}, // executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) // class DatabaseConnectionTest { // From 3f174134273ab4fb9021ffbbde64d335b04064dd Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Tue, 9 Sep 2025 20:23:34 +0900 Subject: [PATCH 11/31] =?UTF-8?q?=EC=9C=A0=EC=A0=80=20=EC=84=B8=EC=85=98?= =?UTF-8?q?=20=EB=A1=9C=EA=B7=B8=EC=9D=B8=20(#57)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: 로그인 관련 dto 작성 * feat: login controller * feat: Login api * feat: 로그인 프론트 연동 permission 해결 안됨 * feat: User identity 설정 및 user 정보 return --- .../typehandler/StringListTypeHandler.java | 50 +++++++++++ .../config/security/SecurityConfig.java | 83 +++++++++++++++---- .../security/endpoints/SecurityEndpoints.java | 13 +-- .../auth/controller/AuthController.java | 39 +++++++++ .../domain/auth/dto/LoginRequestDto.java | 15 ++++ .../domain/auth/model/AuthCredential.java | 81 ++++++++++++++++++ .../auth/service/AuthCredentialAdapter.java | 28 +++++++ .../user/controller/UserController.java | 14 +++- .../user/dto/UserProfileResponseDto.java | 30 +++++++ .../gltkorea/icebang/mapper/AuthMapper.java | 3 + .../src/main/resources/application.yml | 3 +- .../src/main/resources/log4j2-develop.yml | 7 ++ .../resources/mybatis/mapper/AuthMapper.xml | 18 ++++ 
.../sql/01-insert-internal-users.sql | 2 +- 14 files changed, 354 insertions(+), 32 deletions(-) create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java new file mode 100644 index 00000000..4363124c --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java @@ -0,0 +1,50 @@ +package com.gltkorea.icebang.config.mybatis.typehandler; + +import java.sql.CallableStatement; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.ibatis.type.BaseTypeHandler; +import org.apache.ibatis.type.JdbcType; +import org.apache.ibatis.type.MappedJdbcTypes; +import org.apache.ibatis.type.MappedTypes; + +@MappedTypes(List.class) +@MappedJdbcTypes(JdbcType.VARCHAR) +public class StringListTypeHandler extends BaseTypeHandler> { + + @Override + public void setNonNullParameter( + PreparedStatement ps, int i, List parameter, JdbcType jdbcType) throws SQLException { + ps.setString(i, String.join(",", parameter)); + } + + @Override + public List getNullableResult(ResultSet rs, String columnName) throws SQLException { + String value = rs.getString(columnName); + return convertToList(value); + } + + @Override + public List getNullableResult(ResultSet rs, int columnIndex) throws SQLException { + String value = rs.getString(columnIndex); + return convertToList(value); + } + + @Override + public List getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { + String value = cs.getString(columnIndex); + return convertToList(value); + } + + private List convertToList(String value) { + if (value == null || value.trim().isEmpty()) { + return new ArrayList<>(); + } + return Arrays.asList(value.split(",")); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java index 4a2fff36..69ee1bf0 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java @@ -5,14 +5,22 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.authentication.dao.DaoAuthenticationProvider; +import org.springframework.security.config.annotation.authentication.configuration.AuthenticationConfiguration; import 
org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; import org.springframework.security.crypto.password.NoOpPasswordEncoder; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.security.web.SecurityFilterChain; +import org.springframework.web.cors.CorsConfiguration; +import org.springframework.web.cors.UrlBasedCorsConfigurationSource; +import org.springframework.web.filter.CorsFilter; import com.gltkorea.icebang.config.security.endpoints.SecurityEndpoints; +import com.gltkorea.icebang.domain.auth.service.AuthCredentialAdapter; import lombok.RequiredArgsConstructor; @@ -20,6 +28,21 @@ @RequiredArgsConstructor public class SecurityConfig { private final Environment environment; + private final AuthCredentialAdapter userDetailsService; + + @Bean + public AuthenticationProvider authenticationProvider() { + DaoAuthenticationProvider provider = new DaoAuthenticationProvider(); + provider.setUserDetailsService(userDetailsService); + provider.setPasswordEncoder(bCryptPasswordEncoder()); + return provider; + } + + @Bean + public AuthenticationManager authenticationManager(AuthenticationConfiguration config) + throws Exception { + return config.getAuthenticationManager(); + } @Bean public SecureRandom secureRandom() { @@ -34,31 +57,46 @@ public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { .permitAll() .requestMatchers("/auth/login", "/auth/logout") .permitAll() + .requestMatchers("/v0/auth/check-session") + .authenticated() .requestMatchers(SecurityEndpoints.DATA_ADMIN.getMatchers()) - .hasAuthority("SUPER_ADMIN") + .hasRole("SUPER_ADMIN") // hasAuthority -> hasRole .requestMatchers(SecurityEndpoints.DATA_ENGINEER.getMatchers()) - .hasAnyAuthority( - "SUPER_ADMIN", "ADMIN", "SENIOR_DATA_ENGINEER", "DATA_ENGINEER") + .hasAnyRole( + "SUPER_ADMIN", + "SYSTEM_ADMIN", + "AI_ENGINEER", + "DATA_SCIENTIST", + "CRAWLING_ENGINEER", + "TECH_LEAD", + "DEVOPS") .requestMatchers(SecurityEndpoints.ANALYST.getMatchers()) - .hasAnyAuthority( + .hasAnyRole( "SUPER_ADMIN", - "ADMIN", - "SENIOR_DATA_ENGINEER", - "DATA_ENGINEER", - "SENIOR_DATA_ANALYST", - "DATA_ANALYST", - "VIEWER") + "SYSTEM_ADMIN", + "ORG_ADMIN", + "DATA_SCIENTIST", + "MARKETING_ANALYST", + "QA_ENGINEER", + "PROJECT_MANAGER", + "PRODUCT_OWNER", + "USER") .requestMatchers(SecurityEndpoints.OPS.getMatchers()) - .hasAnyAuthority( - "SUPER_ADMIN", "ADMIN", "SENIOR_DATA_ENGINEER", "DATA_ENGINEER") + .hasAnyRole( + "SUPER_ADMIN", + "SYSTEM_ADMIN", + "WORKFLOW_ADMIN", + "OPERATIONS_MANAGER", + "DEVOPS", + "TECH_LEAD") .requestMatchers(SecurityEndpoints.USER.getMatchers()) - .authenticated() + .hasAnyRole("SUPER_ADMIN", "SYSTEM_ADMIN", "ORG_ADMIN", "USER") .anyRequest() .authenticated()) .formLogin(AbstractHttpConfigurer::disable) .logout( logout -> logout.logoutUrl("/auth/logout").logoutSuccessUrl("/auth/login").permitAll()) - .csrf(AbstractHttpConfigurer::disable) // API 사용을 위해 CSRF 비활성화 + .csrf(AbstractHttpConfigurer::disable) .build(); } @@ -67,10 +105,25 @@ public PasswordEncoder bCryptPasswordEncoder() { String[] activeProfiles = environment.getActiveProfiles(); for (String profile : activeProfiles) { - if ("dev".equals(profile) || "test".equals(profile)) { + if ("develop".equals(profile) || "test".equals(profile)) { return NoOpPasswordEncoder.getInstance(); } } return new BCryptPasswordEncoder(); } 
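  /*
   * [Editor's note - illustrative sketch, not part of the patch.]
   * The switch from hasAuthority(...) to hasRole(...) in this hunk works because hasRole("X")
   * checks for a granted authority named "ROLE_X", and AuthCredential.getAuthorities() (added in
   * this same patch) builds exactly that prefixed name from the role strings loaded from user_role:
   *
   *   GrantedAuthority authority = new SimpleGrantedAuthority("ROLE_" + "SUPER_ADMIN".trim());
   *   // hasRole("SUPER_ADMIN") internally looks for "ROLE_SUPER_ADMIN", so a user whose
   *   // user_role rows resolve to the plain name "SUPER_ADMIN" passes the matchers above.
   */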
+ + @Bean + public CorsFilter corsFilter() { + CorsConfiguration config = new CorsConfiguration(); + config.addAllowedOrigin("http://localhost:3000"); // 프론트 주소 + config.addAllowedOrigin("https://admin.icebang.site"); // 프론트 주소 + config.addAllowedHeader("*"); + config.addAllowedMethod("*"); + config.setAllowCredentials(true); // 세션 쿠키 허용 + + UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); + source.registerCorsConfiguration("/**", config); + + return new CorsFilter(source); + } } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java index c73f462d..bc6eafe2 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java @@ -1,16 +1,7 @@ package com.gltkorea.icebang.config.security.endpoints; public enum SecurityEndpoints { - PUBLIC( - "/", - "/login", - "/register", - "/api/public/**", - "/health", - "/css/**", - "/js/**", - "/images/**", - "/v0/**"), + PUBLIC("/", "/v0/auth/login", "/api/public/**", "/health", "/css/**", "/js/**", "/images/**"), // 데이터 관리 관련 엔드포인트 DATA_ADMIN("/admin/**", "/api/admin/**", "/management/**", "/actuator/**"), @@ -25,7 +16,7 @@ public enum SecurityEndpoints { OPS("/api/scheduler/**", "/api/monitoring/**"), // 일반 사용자 엔드포인트 - USER("/user/**", "/profile/**"); + USER("/user/**", "/profile/**", "/v0/auth/check-session"); private final String[] patterns; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java index 5da466f6..39fba398 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java @@ -1,12 +1,22 @@ package com.gltkorea.icebang.domain.auth.controller; import org.springframework.http.HttpStatus; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.annotation.AuthenticationPrincipal; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.web.context.HttpSessionSecurityContextRepository; import org.springframework.web.bind.annotation.*; import com.gltkorea.icebang.common.dto.ApiResponse; +import com.gltkorea.icebang.domain.auth.dto.LoginRequestDto; import com.gltkorea.icebang.domain.auth.dto.RegisterDto; +import com.gltkorea.icebang.domain.auth.model.AuthCredential; import com.gltkorea.icebang.domain.auth.service.AuthService; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpSession; import jakarta.validation.Valid; import lombok.RequiredArgsConstructor; @@ -15,6 +25,7 @@ @RequiredArgsConstructor public class AuthController { private final AuthService authService; + private final AuthenticationManager authenticationManager; @PostMapping("/register") @ResponseStatus(HttpStatus.CREATED) @@ -22,4 +33,32 @@ public ApiResponse register(@Valid @RequestBody RegisterDto registerDto) { authService.registerUser(registerDto); return 
ApiResponse.success(null); } + + @PostMapping("/login") + public ApiResponse login( + @RequestBody LoginRequestDto request, HttpServletRequest httpRequest) { + UsernamePasswordAuthenticationToken token = + new UsernamePasswordAuthenticationToken(request.getEmail(), request.getPassword()); + + Authentication auth = authenticationManager.authenticate(token); + + SecurityContextHolder.getContext().setAuthentication(auth); + + HttpSession session = httpRequest.getSession(true); + session.setAttribute( + HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, + SecurityContextHolder.getContext()); + + return ApiResponse.success(auth); + } + + @GetMapping("/check-session") + public ApiResponse checkSession(@AuthenticationPrincipal AuthCredential user) { + return ApiResponse.success(user != null); + } + + @GetMapping("/permissions") + public ApiResponse getPermissions(@AuthenticationPrincipal AuthCredential user) { + return ApiResponse.success(user); + } } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java new file mode 100644 index 00000000..081d2016 --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java @@ -0,0 +1,15 @@ +package com.gltkorea.icebang.domain.auth.dto; + +import jakarta.validation.constraints.Email; +import jakarta.validation.constraints.NotBlank; +import lombok.Data; + +@Data +public class LoginRequestDto { + @NotBlank(message = "이메일은 필수입니다") + @Email(message = "올바른 이메일 형식이 아닙니다") + private String email; + + @NotBlank(message = "비밀번호는 필수입니다") + private String password; +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java new file mode 100644 index 00000000..ab4acc2e --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java @@ -0,0 +1,81 @@ +package com.gltkorea.icebang.domain.auth.model; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.core.userdetails.UserDetails; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class AuthCredential implements UserDetails { + + private BigInteger id; + private String email; + private String password; + private String status; + + // roles -> Spring Security authority로 변환 + private List roles; + + // MyBatis GROUP_CONCAT 결과를 List으로 변환하는 setter + public void setRoles(String rolesString) { + if (rolesString != null && !rolesString.trim().isEmpty()) { + this.roles = Arrays.asList(rolesString.split(",")); + } else { + this.roles = new ArrayList<>(); + } + } + + public void setRoles(List roles) { + this.roles = roles; + } + + public List getRoles() { + return roles != null ? 
roles : new ArrayList<>(); + } + + @Override + public Collection getAuthorities() { + return getRoles().stream() + .map(role -> new SimpleGrantedAuthority("ROLE_" + role.trim())) // ROLE_ prefix 추가 + 공백 제거 + .collect(Collectors.toList()); + } + + @Override + public String getUsername() { + return email; // 로그인 ID는 email + } + + @Override + public boolean isAccountNonExpired() { + return true; // 필요 시 status 기반으로 변경 가능 + } + + @Override + public boolean isAccountNonLocked() { + return !"LOCKED".equalsIgnoreCase(status); + } + + @Override + public boolean isCredentialsNonExpired() { + return true; + } + + @Override + public boolean isEnabled() { + return !"DISABLED".equalsIgnoreCase(status); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java new file mode 100644 index 00000000..e3268314 --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java @@ -0,0 +1,28 @@ +package com.gltkorea.icebang.domain.auth.service; + +import org.springframework.security.core.userdetails.UserDetails; +import org.springframework.security.core.userdetails.UserDetailsService; +import org.springframework.security.core.userdetails.UsernameNotFoundException; +import org.springframework.stereotype.Service; + +import com.gltkorea.icebang.domain.auth.model.AuthCredential; +import com.gltkorea.icebang.mapper.AuthMapper; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class AuthCredentialAdapter implements UserDetailsService { + private final AuthMapper authMapper; + + @Override + public UserDetails loadUserByUsername(String email) throws UsernameNotFoundException { + AuthCredential user = authMapper.findUserByEmail(email); + + if (user == null) { + throw new UsernameNotFoundException("User not found with email: " + email); + } + + return user; + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java index e6b07bce..534e9ba6 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java @@ -1,13 +1,13 @@ package com.gltkorea.icebang.domain.user.controller; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; +import org.springframework.security.core.annotation.AuthenticationPrincipal; +import org.springframework.web.bind.annotation.*; import com.gltkorea.icebang.common.dto.ApiResponse; +import com.gltkorea.icebang.domain.auth.model.AuthCredential; import com.gltkorea.icebang.domain.user.dto.CheckEmailRequest; import com.gltkorea.icebang.domain.user.dto.CheckEmailResponse; +import com.gltkorea.icebang.domain.user.dto.UserProfileResponseDto; import com.gltkorea.icebang.domain.user.service.UserService; import jakarta.validation.Valid; @@ -27,4 +27,10 @@ public ApiResponse checkEmailAvailable( return ApiResponse.success(CheckEmailResponse.builder().available(available).build(), message); } + + @GetMapping("/me") + public ApiResponse getUserProfile( + 
@AuthenticationPrincipal AuthCredential user) { + return ApiResponse.success(UserProfileResponseDto.from(user)); + } } diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java new file mode 100644 index 00000000..9254ace7 --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java @@ -0,0 +1,30 @@ +package com.gltkorea.icebang.domain.user.dto; + +import java.math.BigInteger; +import java.util.List; + +import com.gltkorea.icebang.domain.auth.model.AuthCredential; + +import lombok.Getter; + +@Getter +public class UserProfileResponseDto { + + private final BigInteger id; + private final String email; + private final String name; + private final List roles; + private final String status; + + public UserProfileResponseDto(AuthCredential authCredential) { + this.id = authCredential.getId(); + this.email = authCredential.getEmail(); + this.name = authCredential.getEmail(); // name 필드가 없으면 email 사용 + this.roles = authCredential.getRoles(); + this.status = authCredential.getStatus(); + } + + public static UserProfileResponseDto from(AuthCredential authCredential) { + return new UserProfileResponseDto(authCredential); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java b/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java index 09033730..4480daf0 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java @@ -3,9 +3,12 @@ import org.apache.ibatis.annotations.Mapper; import com.gltkorea.icebang.domain.auth.dto.RegisterDto; +import com.gltkorea.icebang.domain.auth.model.AuthCredential; @Mapper public interface AuthMapper { + AuthCredential findUserByEmail(String email); + boolean existsByEmail(String email); int insertUser(RegisterDto dto); // users insert diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index 278dfb11..e852951b 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -5,4 +5,5 @@ spring: active: develop mybatis: # Mapper XML 파일 위치 - mapper-locations: classpath:mapper/**/*.xml \ No newline at end of file + mapper-locations: classpath:mapper/**/*.xml + type-handlers-package: com.gltkorea.icebang.config.mybatis.typehandler \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index d1afc02b..1b5c6e35 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -119,6 +119,13 @@ Configuration: # 6. 
트랜잭션 로그 - DB 작업 추적 - name: org.springframework.transaction + level: DEBUG + additivity: "false" + AppenderRef: + - ref: console-appender + - ref: file-info-appender + + - name: com.gltkorea.icebang.domain.auth.mapper level: DEBUG additivity: "false" AppenderRef: diff --git a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml index c503e76e..0023c224 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml @@ -12,6 +12,24 @@ ) + + + INSERT INTO user (name, email, password) VALUES (#{name}, #{email}, #{password}); diff --git a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql b/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql index 3a8529c8..1a69076e 100644 --- a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql +++ b/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql @@ -147,7 +147,7 @@ INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES ('홍크롤러', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), ('서데이터', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), ('윤워크플로', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), - ('시스템관리자', 'admin@icebang.site', '$2a$10$encrypted_password_hash0', 'ACTIVE'); + ('시스템관리자', 'admin@icebang.site', 'qwer1234!A', 'ACTIVE'); -- 8. icebang 직원-조직 연결 INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES From dfca7f08a41ff493ba1cfdfb028f22ea35d696aa Mon Sep 17 00:00:00 2001 From: can019 Date: Tue, 9 Sep 2025 20:24:34 +0900 Subject: [PATCH 12/31] =?UTF-8?q?hotfix:=20login=20response=EC=8B=9C=20aut?= =?UTF-8?q?h=20=EC=A0=95=EB=B3=B4=EB=A5=BC=20return=ED=95=98=EB=8D=98=20?= =?UTF-8?q?=EB=B2=84=EA=B7=B8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../gltkorea/icebang/domain/auth/controller/AuthController.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java index 39fba398..a33503cf 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java @@ -49,7 +49,7 @@ public ApiResponse login( HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, SecurityContextHolder.getContext()); - return ApiResponse.success(auth); + return ApiResponse.success(null); } @GetMapping("/check-session") From a6dfd34878354c5d4c62ea1f0bbe8b795fd5f874 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 9 Sep 2025 20:37:30 +0900 Subject: [PATCH 13/31] =?UTF-8?q?feat:=20Blogger=20=EB=B8=94=EB=A1=9C?= =?UTF-8?q?=EA=B7=B8=20=ED=8F=AC=EC=8A=A4=ED=8C=85=20=EC=84=9C=EB=B9=84?= =?UTF-8?q?=EC=8A=A4=20=EA=B5=AC=ED=98=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Blogger API 인증 및 포스팅 기능을 추가하여 블로그에 글 작성 가능하도록 함. - 기존 WebDriver 기반 로직을 대체할 수 있도록 API 인증, 포스트 작성, 설정 로드 기능 포함. 
- blog 엔드포인트 수정 및 추가 - schemas 수정 --- .../app/api/endpoints/blog.py | 34 +++- .../app/model/schemas.py | 4 +- .../service/blog/blogger_blog_post_service.py | 145 ++++++++++++++++++ 3 files changed, 174 insertions(+), 9 deletions(-) create mode 100644 apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 341c0aac..85c6924c 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -4,6 +4,7 @@ from ...model.schemas import * from app.service.blog.tistory_blog_post_service import TistoryBlogPostService from app.service.blog.naver_blog_post_service import NaverBlogPostService +from ...service.blog.blogger_blog_post_service import BloggerBlogPostService router = APIRouter() @@ -28,9 +29,9 @@ async def publish(request: RequestBlogPublish): if request.tag == "naver": naver_service = NaverBlogPostService() result = naver_service.post_content( - title=request.title, - content=request.content, - tags=request.tags + title=request.post_title, + content=request.post_content, + tags=request.post_tags ) if not result: @@ -43,12 +44,12 @@ async def publish(request: RequestBlogPublish): metadata=result ) - else: + elif request.tag == "tistory": tistory_service = TistoryBlogPostService() result = tistory_service.post_content( - title=request.title, - content=request.content, - tags=request.tags + title=request.post_title, + content=request.post_content, + tags=request.post_tags ) if not result: @@ -61,3 +62,22 @@ async def publish(request: RequestBlogPublish): status="200", metadata=result ) + + elif request.tag == "blogger": + blogger_service = BloggerBlogPostService() + result = blogger_service.post_content( + title=request.post_title, + content=request.post_content, + tags=request.post_tags + ) + + if not result: + raise CustomException("블로거 블로그 포스팅에 실패했습니다.", status_code=500) + + return ResponseBlogPublish( + job_id= 1, + schedule_id= 1, + schedule_his_id= 1, + status="200", + metadata=result + ) \ No newline at end of file diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index 36d0514f..b3982638 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -69,14 +69,14 @@ class ResponseSadaguCrawl(ResponseBase): # 블로그 콘텐츠 생성 class RequestBlogCreate(RequestBase): - tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류 태그") - category: str = Field(..., title="카테고리", description="검색(상품) 카테고리") + pass class ResponseBlogCreate(ResponseBase): pass # 블로그 배포 class RequestBlogPublish(RequestBase): + tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류") blog_id: str = Field(..., description= "블로그 아이디") blog_pw: str = Field(..., description= "블로그 비밀번호") post_title: str = Field(..., description= "포스팅 제목") diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py new file mode 100644 index 00000000..cd5d1126 --- /dev/null +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py @@ -0,0 +1,145 @@ +import json +import os +import pickle +from typing import Dict, List, Optional + +from googleapiclient.discovery import build +from google.auth.transport.requests import Request +from google_auth_oauthlib.flow import InstalledAppFlow + +from 
app.errors.BlogPostingException import * +from app.service.blog.base_blog_post_service import BaseBlogPostService + + +class BloggerBlogPostService(BaseBlogPostService): + """ + Blogger API를 사용하여 포스팅을 관리하는 서비스 + """ + + def __init__(self, config_file="blog_config.json"): + # 부모 클래스 생성자 호출 (WebDriver는 None으로 설정됨) + super().__init__() + + # API 관련 추가 초기화 + self.config_file = config_file + self.blogger_service = None + self.blog_id = None + self.scopes = ['https://www.googleapis.com/auth/blogger'] + + def _requires_webdriver(self) -> bool: + """API 기반 서비스는 WebDriver가 필요하지 않음""" + return False + + def _load_config(self) -> None: + """ + 플랫폼별 설정 로드 + """ + try: + with open(self.config_file, 'r', encoding='utf-8') as f: + self.config = json.load(f) + self.current_upload_account = self.config['upload_account'] + except FileNotFoundError: + default_config = { + "upload_account": "your_account@gmail.com", + "credentials": "credentials.json" + } + with open(self.config_file, 'w', encoding='utf-8') as f: + json.dump(default_config, f, indent=2) + self.config = default_config + self.current_upload_account = self.config['upload_account'] + + def _login(self) -> None: + """ + API 인증 (Selenium의 로그인을 대체) + """ + self._authenticate_api() + + def _authenticate_api(self): + """ + API 인증 및 서비스 객체 생성 + """ + token_file = f"token_{self.current_upload_account.replace('@', '_').replace('.', '_')}.pkl" + + try: + creds = None + if os.path.exists(token_file): + with open(token_file, 'rb') as token: + creds = pickle.load(token) + + if not creds or not creds.valid: + if creds and creds.expired and creds.refresh_token: + creds.refresh(Request()) + else: + print(f"새 API 인증이 필요합니다: {self.current_upload_account}") + flow = InstalledAppFlow.from_client_secrets_file( + self.config['credentials'], self.scopes + ) + creds = flow.run_local_server(port=0) + + with open(token_file, 'wb') as token: + pickle.dump(creds, token) + + self.blogger_service = build('blogger', 'v3', credentials=creds) + + blogs = self.blogger_service.blogs().listByUser(userId='self').execute() + if blogs.get('items'): + self.blog_id = blogs['items'][0]['id'] + print(f"API 설정 완료 - 블로그: {blogs['items'][0]['name']}") + return True + else: + print("블로그를 찾을 수 없습니다.") + return False + except Exception as e: + print(f"API 인증/설정 실패: {e}") + raise BloggerApiException("API 인증 실패", e) + + def _write_content(self, title: str, content: str, tags: List[str] = None) -> None: + """ + API를 사용하여 포스팅 작성 + """ + if not self.blogger_service or not self.blog_id: + self._authenticate_api() + + post_data = { + 'title': title, + 'content': content, + 'labels': tags or [] + } + + try: + result = self.blogger_service.posts().insert( + blogId=self.blog_id, + body=post_data + ).execute() + + print(f"포스트 생성 완료: {result.get('url')}") + except Exception as e: + raise BlogPostPublishException( + platform="Blogger", + reason="API 통신 중 오류가 발생했습니다." 
+ ) from e + + def _get_platform_name(self) -> str: + """플랫폼 이름 반환""" + return "Blogger" + + def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + """ + 공통 유효성 검사 로직 + """ + if not title or not title.strip(): + raise BlogContentValidationException("title", "제목이 비어있습니다") + + if not content or not content.strip(): + raise BlogContentValidationException("content", "내용이 비어있습니다") + + # 태그 유효성 검사도 필요에 따라 추가 + # if not tags or not isinstance(tags, list): + # raise BlogContentValidationException("tags", "태그는 리스트 형태여야 합니다") + + def __del__(self): + """ + 리소스 정리 - API 기반 서비스는 별도 정리 불필요 + 부모 클래스의 __del__이 WebDriver 정리를 처리 + """ + super().__del__() \ No newline at end of file From b4c83aacda35d241c62eca8afe5cdf716b23ca86 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Tue, 9 Sep 2025 21:36:58 +0900 Subject: [PATCH 14/31] =?UTF-8?q?User=20login=20test=20=EB=B0=8F=20rest=20?= =?UTF-8?q?docs=20-=20swagger=20=EC=84=A4=EC=A0=95=20(#59)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: User login test * test: User login test with api docs * chore: swagger github action (experimental) * chore: Spring swagger yml target dir 수정 --- .github/workflows/ci-java.yml | 40 ++++++++- apps/user-service/build.gradle | 57 +++++++++++++ .../config/security/SecurityConfig.java | 2 +- .../icebang/config/E2eTestConfiguration.java | 5 ++ .../icebang/config/RestDocsConfiguration.java | 29 +++++++ .../controller/AuthControllerE2eTest.java | 82 +++++++++++++++++++ .../icebang/support/E2eTestSupport.java | 38 ++++++++- .../icebang/support/E2eTestSupportTest.java | 14 ---- 8 files changed, 248 insertions(+), 19 deletions(-) create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 3f1a3b76..91e57996 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -21,6 +21,8 @@ permissions: security-events: write checks: write pull-requests: write + pages: write # GitHub Pages 배포를 위해 추가 + id-token: write # GitHub Pages 배포를 위해 추가 jobs: spotless-check: @@ -90,6 +92,13 @@ jobs: name: build-artifacts path: apps/user-service/build/libs/ + - name: Upload OpenAPI spec artifacts + if: matrix.java-version == '21' && github.ref == 'refs/heads/main' && github.event_name == 'push' + uses: actions/upload-artifact@v4 + with: + name: openapi-spec + path: apps/user-service/build/api-spec/ + docker: name: Build Spring Boot Docker Image and push to registry runs-on: ubuntu-latest @@ -129,4 +138,33 @@ jobs: - name: Analyze image layers run: | echo "=== Image Layer Analysis ===" - docker history ghcr.io/${{ env.REPO_LC }}/user-service:latest --human --no-trunc \ No newline at end of file + docker history ghcr.io/${{ env.REPO_LC }}/user-service:latest --human --no-trunc + + swagger-docs: + name: Deploy Swagger Documentation + runs-on: ubuntu-latest + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + needs: build + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download OpenAPI spec artifacts + uses: actions/download-artifact@v4 + with: + name: openapi-spec + path: ./openapi-spec + + - name: Generate Swagger UI + uses: Legion2/swagger-ui-action@v1 + with: + output: user-service-swagger-ui + spec-file: openapi-spec/openapi3.yaml + + - name: 
Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./user-service-docs + destination_dir: user-service \ No newline at end of file diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 145af4f6..45abf367 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -3,6 +3,8 @@ plugins { id 'org.springframework.boot' version '3.5.4' id 'io.spring.dependency-management' version '1.1.7' id 'com.diffplug.spotless' version '7.2.1' + id 'org.asciidoctor.jvm.convert' version '3.3.2' + id 'com.epages.restdocs-api-spec' version '0.18.2' } group = 'com.gltkorea' @@ -23,6 +25,8 @@ configurations { all { exclude group: 'org.springframework.boot', module: 'spring-boot-starter-logging' } + // AsciiDoctor Extension for REST Docs + asciidoctorExt } repositories { @@ -74,9 +78,21 @@ dependencies { testImplementation 'org.testcontainers:mariadb' testImplementation 'com.h2database:h2' testRuntimeOnly 'org.junit.platform:junit-platform-launcher' + + // Spring REST Docs + testImplementation 'org.springframework.restdocs:spring-restdocs-mockmvc' + testImplementation 'org.springframework.restdocs:spring-restdocs-webtestclient' + asciidoctorExt 'org.springframework.restdocs:spring-restdocs-asciidoctor' + testImplementation 'com.epages:restdocs-api-spec-mockmvc:0.18.2' +} + +// REST Docs 스니펫 디렉토리 설정 +ext { + snippetsDir = file('build/generated-snippets') } tasks.named('test') { + outputs.dir snippetsDir useJUnitPlatform { // 기본적으로는 e2e 태그 제외하고 실행 excludeTags 'e2e' @@ -86,6 +102,7 @@ tasks.named('test') { // E2E 테스트 전용 task 추가 tasks.register('e2eTest', Test) { + outputs.dir snippetsDir useJUnitPlatform { includeTags 'e2e' } @@ -98,9 +115,41 @@ tasks.register('e2eTest', Test) { // 모든 테스트 실행 task tasks.register('allTests', Test) { + outputs.dir snippetsDir useJUnitPlatform() } +// AsciiDoctor 설정 (REST Docs 문서 생성) +asciidoctor { + inputs.dir snippetsDir + configurations 'asciidoctorExt' + dependsOn test + + baseDirFollowsSourceDir() + + attributes( + 'snippets': snippetsDir, + 'source-highlighter': 'coderay', + 'toc': 'left', + 'toclevels': '3', + 'sectlinks': 'true', + 'operation-curl-request-title': 'Example request', + 'operation-http-response-title': 'Example response' + ) +} + +asciidoctor.doFirst { + delete file('src/docs/asciidoc') +} + +// JAR에 생성된 문서 포함 +bootJar { + dependsOn asciidoctor + from ("${asciidoctor.outputDir}/html5") { + into 'static/docs' + } +} + spotless { java { googleJavaFormat('1.17.0') @@ -118,3 +167,11 @@ spotless { endWithNewline() } } + +openapi3 { + server = 'http://localhost:8080' + title = 'IceBang API' + description = 'IceBang API Documentation' + version = '0.0.1-alpha-snapshot' + format = 'yaml' +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java index 69ee1bf0..bde09f6e 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java @@ -105,7 +105,7 @@ public PasswordEncoder bCryptPasswordEncoder() { String[] activeProfiles = environment.getActiveProfiles(); for (String profile : activeProfiles) { - if ("develop".equals(profile) || "test".equals(profile)) { + if ("develop".equals(profile) || profile.contains("test")) { return NoOpPasswordEncoder.getInstance(); } } diff --git 
a/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java index 054360b1..5b1c5ce9 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java @@ -6,9 +6,14 @@ import org.springframework.test.context.DynamicPropertyRegistry; import org.springframework.test.context.DynamicPropertySource; import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; @TestConfiguration(proxyBeanMethods = false) public class E2eTestConfiguration { + @Bean + public ObjectMapper objectMapper() { + return new ObjectMapper(); + } @Bean @ServiceConnection diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java b/apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java new file mode 100644 index 00000000..bdacc10d --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java @@ -0,0 +1,29 @@ +package com.gltkorea.icebang.config; + +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.restdocs.mockmvc.MockMvcRestDocumentation; +import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler; +import org.springframework.restdocs.operation.preprocess.Preprocessors; + +import com.fasterxml.jackson.databind.ObjectMapper; + +@TestConfiguration +public class RestDocsConfiguration { + + @Bean + public RestDocumentationResultHandler restDocumentationResultHandler() { + return MockMvcRestDocumentation.document( + "{class-name}/{method-name}", + Preprocessors.preprocessRequest( + Preprocessors.removeHeaders("Host", "Content-Length"), Preprocessors.prettyPrint()), + Preprocessors.preprocessResponse( + Preprocessors.removeHeaders("Content-Length", "Date", "Keep-Alive", "Connection"), + Preprocessors.prettyPrint())); + } + + @Bean + public ObjectMapper testObjectMapper() { + return new ObjectMapper(); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java new file mode 100644 index 00000000..c5b184fd --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java @@ -0,0 +1,82 @@ +package com.gltkorea.icebang.domain.auth.controller; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.restdocs.headers.HeaderDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.restdocs.payload.JsonFieldType; +import 
org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.jdbc.Sql; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; +import com.gltkorea.icebang.support.E2eTestSupport; + +@Sql("classpath:sql/01-insert-internal-users.sql") +@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) +class AuthControllerE2eTest extends E2eTestSupport { + + @Test + @DisplayName("사용자 로그인 성공") + void login_success() throws Exception { + // given + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + // MockMvc로 REST Docs + OpenAPI 생성 + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/login")) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/") + .content(objectMapper.writeValueAsString(loginRequest))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isEmpty()) + .andDo( + document( + "auth-login", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("사용자 로그인") + .description("이메일과 비밀번호로 사용자 인증을 수행합니다") + .requestFields( + fieldWithPath("email") + .type(JsonFieldType.STRING) + .description("사용자 이메일 주소"), + fieldWithPath("password") + .type(JsonFieldType.STRING) + .description("사용자 비밀번호")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data") + .type(JsonFieldType.NULL) + .description("응답 데이터 (로그인 성공 시 null)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java index ddb3afd9..36156a83 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java @@ -1,28 +1,60 @@ package com.gltkorea.icebang.support; +import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.documentationConfiguration; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.prettyPrint; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.web.client.TestRestTemplate; import org.springframework.boot.test.web.server.LocalServerPort; import org.springframework.context.annotation.Import; +import org.springframework.restdocs.RestDocumentationContextProvider; +import org.springframework.restdocs.RestDocumentationExtension; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.setup.MockMvcBuilders; +import org.springframework.web.context.WebApplicationContext; +import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; import com.gltkorea.icebang.annotation.E2eTest; import com.gltkorea.icebang.config.E2eTestConfiguration; 
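/*
 * [Editor's note - minimal usage sketch under assumptions, not part of the patch.]
 * A concrete E2E test only needs to extend E2eTestSupport to get the REST Docs-configured
 * MockMvc from setUp(); the class and test names below are hypothetical, while the endpoint,
 * seed script, and credentials come from this patch series:
 *
 *   @Sql("classpath:sql/01-insert-internal-users.sql")
 *   class LoginSmokeE2eTest extends E2eTestSupport {
 *     @Test
 *     void loginWithSeededAdmin() throws Exception {
 *       String body = objectMapper.writeValueAsString(
 *           Map.of("email", "admin@icebang.site", "password", "qwer1234!A"));
 *       mockMvc.perform(post("/v0/auth/login")
 *               .contentType(MediaType.APPLICATION_JSON)
 *               .content(body))
 *           .andExpect(status().isOk());
 *     }
 *   }
 */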
@Import(E2eTestConfiguration.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@ExtendWith(RestDocumentationExtension.class) @E2eTest public abstract class E2eTestSupport { + @Autowired protected ObjectMapper objectMapper; @LocalServerPort protected int port; - @Autowired protected TestRestTemplate restTemplate; + @Autowired protected WebApplicationContext webApplicationContext; + + protected MockMvc mockMvc; + + @BeforeEach + void setUp(RestDocumentationContextProvider restDocumentation) { + // MockMvc 설정 (MockMvc 기반 테스트용) + this.mockMvc = + MockMvcBuilders.webAppContextSetup(webApplicationContext) + .apply( + documentationConfiguration(restDocumentation) + .operationPreprocessors() + .withRequestDefaults(prettyPrint()) + .withResponseDefaults(prettyPrint())) + .build(); + } protected String getBaseUrl() { return "http://localhost:" + port; } protected String getApiUrl(String path) { - return getBaseUrl() + "/api" + path; + return getBaseUrl() + path; + } + + /** REST Docs용 API URL 생성 (path parameter 포함) */ + protected String getApiUrlForDocs(String path) { + return path; } } diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java index bad5a2ba..8b9da9b8 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java @@ -11,19 +11,5 @@ void shouldStartWithRandomPort() { // 포트가 제대로 할당되었는지 확인 assertThat(port).isGreaterThan(0); assertThat(getBaseUrl()).startsWith("http://localhost:"); - assertThat(getApiUrl("/test")).contains("/api/test"); - } - - @Test - void shouldHaveRestTemplate() { - // RestTemplate이 주입되었는지 확인 - assertThat(restTemplate).isNotNull(); - } - - @Test - void shouldConnectToMariaDBContainer() { - // 실제 DB 연결 확인 - String response = restTemplate.getForObject(getApiUrl("/health"), String.class); - // health check endpoint가 있다면 사용, 없으면 간단한 컨트롤러 만들어서 테스트 } } From 681d31eeddd5009daec0828a3c723fd4733b3a0c Mon Sep 17 00:00:00 2001 From: can019 Date: Tue, 9 Sep 2025 21:38:19 +0900 Subject: [PATCH 15/31] =?UTF-8?q?feat:=20PR=EC=8B=9C=20integration=20test?= =?UTF-8?q?=20=ED=99=9C=EC=84=B1=ED=99=94?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-java.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 91e57996..2505b59e 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -81,7 +81,7 @@ jobs: if [ "${{ github.base_ref }}" == "main" ]; then ./gradlew allTests else - ./gradlew test + ./gradlew allTests fi working-directory: apps/user-service From 7bf7c09a53f010a5a590ca4912d391850b88a61e Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Wed, 10 Sep 2025 02:16:26 +0900 Subject: [PATCH 16/31] =?UTF-8?q?=EC=9C=A0=EC=A0=80=20=ED=9A=8C=EC=9B=90?= =?UTF-8?q?=EA=B0=80=EC=9E=85=20mybatis=20query=20mapping=20=EB=B2=84?= =?UTF-8?q?=EA=B7=B8=20(#60)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: test dir 구조 개편 * chore: integrationTest annotation 및 support 정의 * fix: User register auth mapper xml 쿼리 param mismatch userOrgId -> orgId * test: fix context load test vis extend e2e support * chore: Unit test 비활성화 * chore: Integration test 일부 세팅 --- 
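A minimal sketch of the MyBatis binding rule behind the AuthMapper.xml param fix in this patch: #{...} placeholders are resolved against property names on the parameter object, so #{organizationId} and #{departmentId} cannot bind when the object passed to the user_organization INSERT only exposes orgId and deptId. The class below is hypothetical, not the project's actual DTO; it only illustrates the naming contract under that assumption.

// Hypothetical parameter object for the user_organization INSERT in AuthMapper.xml (illustration only).
// MyBatis reads #{id}, #{orgId}, #{deptId}, #{positionId}, #{status} via these properties,
// which is why the XML placeholders must match the property names exactly.
public class UserOrganizationParam {
    private Long id;
    private Long orgId;       // #{orgId}: the old #{organizationId} had no matching property
    private Long deptId;      // #{deptId}: likewise for the old #{departmentId}
    private Long positionId;
    private String status;

    public Long getId() { return id; }
    public Long getOrgId() { return orgId; }
    public Long getDeptId() { return deptId; }
    public Long getPositionId() { return positionId; }
    public String getStatus() { return status; }
}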
.../security/endpoints/SecurityEndpoints.java | 11 +- .../application-test-integration.yml | 51 +++ .../resources/mybatis/mapper/AuthMapper.xml | 2 +- .../mybatis/mapper/OrganizationMapper.xml | 8 +- .../icebang/DatabaseConnectionTest.java | 81 ----- .../icebang/UserServiceApplicationTests.java | 13 - .../icebang/controller/TestController.java | 15 - .../controller/AuthControllerE2eTest.java | 82 ----- .../icebang/{ => e2e}/annotation/E2eTest.java | 2 +- .../config/E2eTestConfiguration.java | 2 +- .../scenario/UserRegistrationFlowE2eTest.java | 299 ++++++++++++++++++ .../UserServiceApplicationE2eTests.java | 11 + .../icebang/e2e/support/E2eTestSupport.java | 60 ++++ .../{ => e2e}/support/E2eTestSupportTest.java | 4 +- .../annotation/IntegrationTest.java | 15 + .../icebang/integration/auth/testa.java | 287 +++++++++++++++++ .../config/RestDocsConfiguration.java | 2 +- .../support/IntegrationTestSupport.java | 13 + .../icebang/support/E2eTestSupport.java | 60 ---- .../icebang/support/UnitTestSupportTest.java | 41 --- .../{ => unit}/annotation/UnitTest.java | 2 +- .../{ => unit}/support/UnitTestSupport.java | 7 +- 22 files changed, 760 insertions(+), 308 deletions(-) create mode 100644 apps/user-service/src/main/resources/application-test-integration.yml delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java rename apps/user-service/src/test/java/com/gltkorea/icebang/{ => e2e}/annotation/E2eTest.java (89%) rename apps/user-service/src/test/java/com/gltkorea/icebang/{ => e2e}/config/E2eTestConfiguration.java (97%) create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java rename apps/user-service/src/test/java/com/gltkorea/icebang/{ => e2e}/support/E2eTestSupportTest.java (67%) create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java rename apps/user-service/src/test/java/com/gltkorea/icebang/{ => integration}/config/RestDocsConfiguration.java (95%) create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java rename apps/user-service/src/test/java/com/gltkorea/icebang/{ => unit}/annotation/UnitTest.java (89%) rename apps/user-service/src/test/java/com/gltkorea/icebang/{ => unit}/support/UnitTestSupport.java (50%) diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java index bc6eafe2..da658775 100644 --- 
a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java @@ -1,7 +1,16 @@ package com.gltkorea.icebang.config.security.endpoints; public enum SecurityEndpoints { - PUBLIC("/", "/v0/auth/login", "/api/public/**", "/health", "/css/**", "/js/**", "/images/**"), + PUBLIC( + "/", + "/v0/auth/login", + "/api/public/**", + "/health", + "/css/**", + "/js/**", + "/images/**", + "/v0/organizations/**", + "/v0/auth/register"), // 데이터 관리 관련 엔드포인트 DATA_ADMIN("/admin/**", "/api/admin/**", "/management/**", "/actuator/**"), diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml new file mode 100644 index 00000000..95faf0f3 --- /dev/null +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -0,0 +1,51 @@ +spring: + config: + activate: + on-profile: test-integration + + # H2 인메모리 데이터베이스 설정 (Unit Test용) + datasource: + url: jdbc:h2:mem:testdb;MODE=MariaDB;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=TRUE + username: sa + password: + driver-class-name: org.h2.Driver + hikari: + connection-init-sql: "SET MODE MariaDB" + connection-timeout: 30000 + idle-timeout: 600000 + max-lifetime: 1800000 + maximum-pool-size: 10 + minimum-idle: 5 + pool-name: HikariCP-MyBatis + + # H2 웹 콘솔 활성화 (디버깅용) + h2: + console: + enabled: true + + # JPA 설정 (H2용) + jpa: + hibernate: + ddl-auto: create-drop + show-sql: true + properties: + hibernate: + dialect: org.hibernate.dialect.H2Dialect + + # SQL 스크립트 초기화 설정 + sql: + init: + mode: always + schema-locations: + - classpath:sql/00-drop-h2.sql + - classpath:sql/01-schema.sql + encoding: UTF-8 + +mybatis: + mapper-locations: classpath:mybatis/mapper/**/*.xml + type-aliases-package: com.gltkorea.icebang.dto + configuration: + map-underscore-to-camel-case: true + +logging: + config: classpath:log4j2-test-unit.yml \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml index 0023c224..154dbb39 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml @@ -37,7 +37,7 @@ INSERT INTO user_organization (user_id, organization_id, department_id, position_id, status) - VALUES (#{id}, #{organizationId}, #{departmentId}, #{positionId}, #{status}); + VALUES (#{id}, #{orgId}, #{deptId}, #{positionId}, #{status}); diff --git a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml index 6a8201b8..740b81a3 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml @@ -9,7 +9,7 @@ SELECT id, name as organizationName - FROM organizations + FROM organization ORDER BY name @@ -18,7 +18,7 @@ SELECT id, name - FROM departments + FROM department WHERE organization_id = #{organizationId} ORDER BY name @@ -28,7 +28,7 @@ SELECT id, title - FROM positions + FROM position WHERE organization_id = #{organizationId} ORDER BY title @@ -39,7 +39,7 @@ id, name, description - FROM roles + FROM role WHERE organization_id = #{organizationId} ORDER BY name diff --git 
a/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java deleted file mode 100644 index c15170cc..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/DatabaseConnectionTest.java +++ /dev/null @@ -1,81 +0,0 @@ -// package com.gltkorea.icebang; -// -// import static org.assertj.core.api.Assertions.assertThat; -// -// import java.sql.Connection; -// import java.sql.SQLException; -// import java.util.Optional; -// -// import javax.sql.DataSource; -// -// import org.junit.jupiter.api.DisplayName; -// import org.junit.jupiter.api.Test; -// import org.springframework.beans.factory.annotation.Autowired; -// import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; -// import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase.Replace; -// import org.springframework.boot.test.context.SpringBootTest; -// import org.springframework.context.annotation.Import; -// import org.springframework.test.context.ActiveProfiles; -// import org.springframework.test.context.jdbc.Sql; -// import org.springframework.transaction.annotation.Transactional; -// -// import com.gltkorea.icebang.dto.UserDto; -// import com.gltkorea.icebang.mapper.UserMapper; -// -// @SpringBootTest -// @Import(TestcontainersConfiguration.class) -// @AutoConfigureTestDatabase(replace = Replace.NONE) -// @ActiveProfiles("test") // application-test-unit.yml 설정을 활성화 -// @Transactional // 테스트 후 데이터 롤백 -// @Sql( -// scripts = {"classpath:sql/create-01-schema.sql", "classpath:sql/insert-user-data.sql"}, -// executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) -// class DatabaseConnectionTest { -// -// @Autowired private DataSource dataSource; -// -// @Autowired private UserMapper userMapper; // JPA Repository 대신 MyBatis Mapper를 주입 -// -// @Test -// @DisplayName("DataSource를 통해 DB 커넥션을 성공적으로 얻을 수 있다.") -// void canGetDatabaseConnection() { -// try (Connection connection = dataSource.getConnection()) { -// assertThat(connection).isNotNull(); -// assertThat(connection.isValid(1)).isTrue(); -// System.out.println("DB Connection successful: " + connection.getMetaData().getURL()); -// } catch (SQLException e) { -// org.junit.jupiter.api.Assertions.fail("Failed to get database connection", e); -// } -// } -// -// @Test -// @DisplayName("MyBatis Mapper를 통해 '홍길동' 사용자를 이메일로 조회") -// void findUserByEmailWithMyBatis() { -// // given -// String testEmail = "hong.gildong@example.com"; -// -// // when -// Optional foundUser = userMapper.findByEmail(testEmail); -// -// // then -// // 사용자가 존재하고, 이름이 '홍길동'인지 확인 -// assertThat(foundUser).isPresent(); -// assertThat(foundUser.get().getName()).isEqualTo("홍길동"); -// System.out.println("Successfully found user with MyBatis: " + foundUser.get().getName()); -// } -// -// @Test -// @DisplayName("샘플 데이터가 올바르게 삽입되었는지 확인") -// void verifyAllSampleDataInserted() { -// // 사용자 데이터 확인 -// Optional hong = userMapper.findByEmail("hong.gildong@example.com"); -// assertThat(hong).isPresent(); -// assertThat(hong.get().getName()).isEqualTo("홍길동"); -// -// Optional kim = userMapper.findByEmail("kim.chulsu@example.com"); -// assertThat(kim).isPresent(); -// assertThat(kim.get().getName()).isEqualTo("김철수"); -// -// System.out.println("샘플 데이터 삽입 성공 - 홍길동, 김철수 확인"); -// } -// } diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java b/apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java 
deleted file mode 100644 index 26cfc86b..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/UserServiceApplicationTests.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.gltkorea.icebang; - -import org.junit.jupiter.api.Test; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.context.annotation.Import; - -@Import(TestcontainersConfiguration.class) -@SpringBootTest -class UserServiceApplicationTests { - - @Test - void contextLoads() {} -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java b/apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java deleted file mode 100644 index c29707ce..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/controller/TestController.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.gltkorea.icebang.controller; - -import org.springframework.boot.test.context.TestComponent; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.RestController; - -@TestComponent -@RestController -public class TestController { - - @GetMapping("/api/health") - public String health() { - return "OK"; - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java deleted file mode 100644 index c5b184fd..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/domain/auth/controller/AuthControllerE2eTest.java +++ /dev/null @@ -1,82 +0,0 @@ -package com.gltkorea.icebang.domain.auth.controller; - -import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; -import static com.epages.restdocs.apispec.ResourceDocumentation.*; -import static org.assertj.core.api.Assertions.*; -import static org.springframework.restdocs.headers.HeaderDocumentation.*; -import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; -import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; -import static org.springframework.restdocs.payload.PayloadDocumentation.*; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.springframework.http.*; -import org.springframework.restdocs.payload.JsonFieldType; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.jdbc.Sql; - -import com.epages.restdocs.apispec.ResourceSnippetParameters; -import com.gltkorea.icebang.support.E2eTestSupport; - -@Sql("classpath:sql/01-insert-internal-users.sql") -@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) -class AuthControllerE2eTest extends E2eTestSupport { - - @Test - @DisplayName("사용자 로그인 성공") - void login_success() throws Exception { - // given - Map loginRequest = new HashMap<>(); - loginRequest.put("email", "admin@icebang.site"); - loginRequest.put("password", "qwer1234!A"); - - // MockMvc로 REST Docs + OpenAPI 생성 - mockMvc - .perform( - post(getApiUrlForDocs("/v0/auth/login")) - .contentType(MediaType.APPLICATION_JSON) - .header("Origin", "https://admin.icebang.site") - .header("Referer", "https://admin.icebang.site/") - .content(objectMapper.writeValueAsString(loginRequest))) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.success").value(true)) - 
.andExpect(jsonPath("$.status").value("OK")) - .andExpect(jsonPath("$.message").value("OK")) - .andExpect(jsonPath("$.data").isEmpty()) - .andDo( - document( - "auth-login", - preprocessRequest(prettyPrint()), - preprocessResponse(prettyPrint()), - resource( - ResourceSnippetParameters.builder() - .tag("Authentication") - .summary("사용자 로그인") - .description("이메일과 비밀번호로 사용자 인증을 수행합니다") - .requestFields( - fieldWithPath("email") - .type(JsonFieldType.STRING) - .description("사용자 이메일 주소"), - fieldWithPath("password") - .type(JsonFieldType.STRING) - .description("사용자 비밀번호")) - .responseFields( - fieldWithPath("success") - .type(JsonFieldType.BOOLEAN) - .description("요청 성공 여부"), - fieldWithPath("data") - .type(JsonFieldType.NULL) - .description("응답 데이터 (로그인 성공 시 null)"), - fieldWithPath("message") - .type(JsonFieldType.STRING) - .description("응답 메시지"), - fieldWithPath("status") - .type(JsonFieldType.STRING) - .description("HTTP 상태")) - .build()))); - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/E2eTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/annotation/E2eTest.java similarity index 89% rename from apps/user-service/src/test/java/com/gltkorea/icebang/annotation/E2eTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/annotation/E2eTest.java index 43290a4a..0840a996 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/E2eTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/annotation/E2eTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.annotation; +package com.gltkorea.icebang.e2e.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/config/E2eTestConfiguration.java similarity index 97% rename from apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/config/E2eTestConfiguration.java index 5b1c5ce9..7ebe181d 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/config/E2eTestConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config; +package com.gltkorea.icebang.e2e.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.testcontainers.service.connection.ServiceConnection; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java new file mode 100644 index 00000000..f0fd3244 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -0,0 +1,299 @@ +package com.gltkorea.icebang.e2e.scenario; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.test.context.jdbc.Sql; + +import com.gltkorea.icebang.e2e.support.E2eTestSupport; + +@Sql( + value = "classpath:sql/01-insert-internal-users.sql", + executionPhase = 
Sql.ExecutionPhase.BEFORE_TEST_CLASS) +@DisplayName("사용자 등록 플로우 E2E 테스트") +class UserRegistrationFlowE2eTest extends E2eTestSupport { + + @SuppressWarnings("unchecked") + @Test + @DisplayName("관리자가 새 사용자를 등록하는 전체 플로우 (ERP 시나리오)") + void completeUserRegistrationFlow() throws Exception { + logStep(1, "관리자 로그인 (최우선)"); + + // 1. 관리자 로그인 (ERP에서 모든 작업의 선행 조건) + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders loginHeaders = new HttpHeaders(); + loginHeaders.setContentType(MediaType.APPLICATION_JSON); + loginHeaders.set("Origin", "https://admin.icebang.site"); + loginHeaders.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> loginEntity = new HttpEntity<>(loginRequest, loginHeaders); + + ResponseEntity loginResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), loginEntity, Map.class); + + assertThat(loginResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) loginResponse.getBody().get("success")).isTrue(); + + logSuccess("관리자 로그인 성공 - 이제 모든 리소스 접근 가능"); + + logStep(2, "조직 목록 조회 (인증된 상태)"); + + // 2. 조직 목록 조회 (로그인 후 가능) + ResponseEntity organizationsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); + + assertThat(organizationsResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) organizationsResponse.getBody().get("success")).isTrue(); + assertThat(organizationsResponse.getBody().get("data")).isNotNull(); + + logSuccess("조직 목록 조회 성공"); + + logStep(3, "부서 및 각종 데이터 조회 (특정 조직 옵션)"); + + // 3. 특정 조직의 부서, 직급, 역할 데이터 조회 + ResponseEntity optionsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations/1/options"), Map.class); + + assertThat(optionsResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) optionsResponse.getBody().get("success")).isTrue(); + + Map optionData = (Map) optionsResponse.getBody().get("data"); + assertThat(optionData.get("departments")).isNotNull(); + assertThat(optionData.get("positions")).isNotNull(); + assertThat(optionData.get("roles")).isNotNull(); + + logSuccess("부서 및 각종 데이터 조회 성공"); + + // 조회된 데이터 로깅 (ERP 관점에서 중요한 메타데이터) + System.out.println("📊 조회된 메타데이터:"); + System.out.println( + " - 부서: " + ((java.util.List) optionData.get("departments")).size() + "개"); + System.out.println( + " - 직급: " + ((java.util.List) optionData.get("positions")).size() + "개"); + System.out.println(" - 역할: " + ((java.util.List) optionData.get("roles")).size() + "개"); + + logStep(4, "새 사용자 등록 (모든 메타데이터 확인 후)"); + + // 4. 
새 사용자 등록 (조회한 메타데이터 기반으로) + Map registerRequest = new HashMap<>(); + registerRequest.put("name", "김철수"); + registerRequest.put("email", "kim.chulsoo@example.com"); + registerRequest.put("orgId", 1); + registerRequest.put("deptId", 2); // 조회한 부서 정보 기반 + registerRequest.put("positionId", 5); // 조회한 직급 정보 기반 + registerRequest.put("roleIds", Arrays.asList(6, 7, 8)); // 조회한 역할 정보 기반 + registerRequest.put("password", null); + + HttpHeaders registerHeaders = new HttpHeaders(); + registerHeaders.setContentType(MediaType.APPLICATION_JSON); + registerHeaders.set("Origin", "https://admin.icebang.site"); + registerHeaders.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> registerEntity = + new HttpEntity<>(registerRequest, registerHeaders); + + ResponseEntity registerResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), registerEntity, Map.class); + + assertThat(registerResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) registerResponse.getBody().get("success")).isTrue(); + + logSuccess("새 사용자 등록 성공"); + logSuccess( + "등록된 사용자: " + registerRequest.get("name") + " (" + registerRequest.get("email") + ")"); + + logCompletion("ERP 사용자 등록 플로우"); + } + + @Disabled + @DisplayName("로그인 없이 리소스 접근 시 모든 요청 차단") + void accessResourcesWithoutLogin_shouldFailForAll() { + logStep(1, "인증 없이 조직 목록 조회 시도"); + + // 1. 로그인 없이 조직 목록 조회 시도 + ResponseEntity orgResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); + + assertThat(orgResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("미인증 조직 조회 차단 확인"); + + logStep(2, "인증 없이 조직 옵션 조회 시도"); + + // 2. 로그인 없이 조직 옵션 조회 시도 + ResponseEntity optResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations/1/options"), Map.class); + + assertThat(optResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("미인증 옵션 조회 차단 확인"); + + logStep(3, "인증 없이 회원가입 시도"); + + // 3. 
로그인 없이 회원가입 시도 + Map registerRequest = new HashMap<>(); + registerRequest.put("name", "테스트사용자"); + registerRequest.put("email", "test@example.com"); + registerRequest.put("orgId", 1); + registerRequest.put("deptId", 2); + registerRequest.put("positionId", 5); + registerRequest.put("roleIds", Arrays.asList(6)); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(registerRequest, headers); + + ResponseEntity regResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); + + assertThat(regResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("미인증 회원가입 차단 확인"); + + logCompletion("ERP 보안 검증"); + } + + @Test + @DisplayName("잘못된 자격증명으로 로그인 시도 시 실패") + void loginWithInvalidCredentials_shouldFail() { + logStep(1, "잘못된 비밀번호로 로그인 시도"); + + Map wrongPasswordRequest = new HashMap<>(); + wrongPasswordRequest.put("email", "admin@icebang.site"); + wrongPasswordRequest.put("password", "wrongpassword"); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(wrongPasswordRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class); + + assertThat(response.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("잘못된 자격증명 로그인 차단 확인"); + + logStep(2, "존재하지 않는 사용자로 로그인 시도"); + + Map nonExistentUserRequest = new HashMap<>(); + nonExistentUserRequest.put("email", "nonexistent@example.com"); + nonExistentUserRequest.put("password", "anypassword"); + + HttpEntity> nonExistentEntity = + new HttpEntity<>(nonExistentUserRequest, headers); + + ResponseEntity nonExistentResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), nonExistentEntity, Map.class); + + assertThat(nonExistentResponse.getStatusCode()) + .isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("존재하지 않는 사용자 로그인 차단 확인"); + } + + @SuppressWarnings("unchecked") + @Disabled + @DisplayName("중복 이메일로 사용자 등록 시도 시 실패") + void register_withDuplicateEmail_shouldFail() { + // 선행 조건: 관리자 로그인 + performAdminLogin(); + + // 첫 번째 사용자 등록 (실제 API 데이터 기반) + registerUser("first.user@example.com", "첫번째사용자"); + + logStep(1, "중복 이메일로 회원가입 시도"); + + // 조직 및 옵션 정보 다시 조회 (실제 값 사용) + ResponseEntity organizationsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); + java.util.List> organizations = + (java.util.List>) organizationsResponse.getBody().get("data"); + Integer orgId = (Integer) organizations.getFirst().get("id"); + + ResponseEntity optionsResponse = + restTemplate.getForEntity(getV0ApiUrl("/organizations/" + orgId + "/options"), Map.class); + Map optionData = (Map) optionsResponse.getBody().get("data"); + + java.util.List> departments = + (java.util.List>) optionData.get("departments"); + java.util.List> positions = + (java.util.List>) optionData.get("positions"); + java.util.List> roles = + (java.util.List>) optionData.get("roles"); + + Integer deptId = (Integer) departments.getFirst().get("id"); + Integer positionId = (Integer) positions.getFirst().get("id"); + Integer roleId = (Integer) roles.getFirst().get("id"); + + // 동일한 이메일로 다시 등록 시도 + Map duplicateRequest = new HashMap<>(); + duplicateRequest.put("name", "중복사용자"); + duplicateRequest.put("email", "first.user@example.com"); // 중복 이메일 + duplicateRequest.put("orgId", orgId); + duplicateRequest.put("deptId", deptId); + 
duplicateRequest.put("positionId", positionId); + duplicateRequest.put("roleIds", Collections.singletonList(roleId)); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(duplicateRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); + + // 중복 이메일 처리 확인 + assertThat(response.getStatusCode()) + .isIn(HttpStatus.BAD_REQUEST, HttpStatus.CONFLICT, HttpStatus.UNPROCESSABLE_ENTITY); + + logSuccess("중복 이메일 등록 차단 확인"); + } + + /** 관리자 로그인을 수행하는 헬퍼 메서드 */ + private void performAdminLogin() { + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(loginRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class); + + if (response.getStatusCode() != HttpStatus.OK) { + logError("관리자 로그인 실패: " + response.getStatusCode()); + throw new RuntimeException("Admin login failed"); + } + + logSuccess("관리자 로그인 완료"); + } + + /** 사용자 등록을 수행하는 헬퍼 메서드 */ + private void registerUser(String email, String name) { + Map registerRequest = new HashMap<>(); + registerRequest.put("name", name); + registerRequest.put("email", email); + registerRequest.put("orgId", 1); + registerRequest.put("deptId", 2); + registerRequest.put("positionId", 5); + registerRequest.put("roleIds", Arrays.asList(6, 7, 8)); + registerRequest.put("password", null); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(registerRequest, headers); + restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java new file mode 100644 index 00000000..2379e450 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java @@ -0,0 +1,11 @@ +package com.gltkorea.icebang.e2e.scenario; + +import org.junit.jupiter.api.Test; + +import com.gltkorea.icebang.e2e.support.E2eTestSupport; + +class UserServiceApplicationE2eTests extends E2eTestSupport { + + @Test + void contextLoads() {} +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java new file mode 100644 index 00000000..12a44848 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java @@ -0,0 +1,60 @@ +package com.gltkorea.icebang.e2e.support; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.web.client.TestRestTemplate; +import org.springframework.boot.test.web.server.LocalServerPort; +import org.springframework.context.annotation.Import; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.web.context.WebApplicationContext; +import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; + +import com.gltkorea.icebang.e2e.annotation.E2eTest; +import 
com.gltkorea.icebang.e2e.config.E2eTestConfiguration; + +@Import(E2eTestConfiguration.class) +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@E2eTest +public abstract class E2eTestSupport { + @Autowired protected TestRestTemplate restTemplate; + + @Autowired protected ObjectMapper objectMapper; + + @LocalServerPort protected int port; + + @Autowired protected WebApplicationContext webApplicationContext; + + protected MockMvc mockMvc; + + protected String getBaseUrl() { + return "http://localhost:" + port; + } + + protected String getApiUrl(String path) { + return getBaseUrl() + path; + } + + protected String getV0ApiUrl(String path) { + return getBaseUrl() + "/v0" + path; + } + + /** 테스트 시나리오 단계별 로깅을 위한 유틸리티 메서드 */ + protected void logStep(int stepNumber, String description) { + System.out.println(String.format("📋 Step %d: %s", stepNumber, description)); + } + + /** 테스트 성공 로깅을 위한 유틸리티 메서드 */ + protected void logSuccess(String message) { + System.out.println("✅ " + message); + } + + /** 테스트 실패 로깅을 위한 유틸리티 메서드 */ + protected void logError(String message) { + System.out.println("❌ " + message); + } + + /** 테스트 완료 로깅을 위한 유틸리티 메서드 */ + protected void logCompletion(String scenario) { + System.out.println(String.format("🎉 %s 시나리오 완료!", scenario)); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupportTest.java similarity index 67% rename from apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupportTest.java index 8b9da9b8..7eccdd4e 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupportTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupportTest.java @@ -1,9 +1,11 @@ -package com.gltkorea.icebang.support; +package com.gltkorea.icebang.e2e.support; import static org.assertj.core.api.Assertions.assertThat; import org.junit.jupiter.api.Test; +import org.springframework.test.annotation.DirtiesContext; +@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) class E2eTestSupportTest extends E2eTestSupport { @Test diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java new file mode 100644 index 00000000..ca4e4046 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java @@ -0,0 +1,15 @@ +package com.gltkorea.icebang.integration.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.springframework.test.context.ActiveProfiles; + +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Tag("integration") +@ActiveProfiles("test-integration") +public @interface IntegrationTest {} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java new file mode 100644 index 00000000..9f273f56 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java @@ -0,0 +1,287 @@ +// package com.gltkorea.icebang.e2e.scenario; 
+// +// import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +// import static com.epages.restdocs.apispec.ResourceDocumentation.*; +// import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; +// import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; +// import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +// import static org.springframework.restdocs.payload.PayloadDocumentation.*; +// import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; +// +// import java.util.Arrays; +// import java.util.HashMap; +// import java.util.Map; +// +// import org.junit.jupiter.api.DisplayName; +// import org.junit.jupiter.api.Test; +// import org.springframework.http.*; +// import org.springframework.restdocs.payload.JsonFieldType; +// import org.springframework.test.annotation.DirtiesContext; +// import org.springframework.test.context.jdbc.Sql; +// +// import com.epages.restdocs.apispec.ResourceSnippetParameters; +// import com.gltkorea.icebang.e2e.support.E2eTestSupport; +// +// @Sql("classpath:sql/01-insert-internal-users.sql") +// @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) +// class UserRegistrationFlowE2eTest extends E2eTestSupport { +// +// @Test +// @DisplayName("조직 목록 조회 성공") +// void getOrganizations_success() throws Exception { +// mockMvc +// .perform( +// get(getApiUrlForDocs("/v0/organizations")) +// .contentType(MediaType.APPLICATION_JSON) +// .header("Origin", "https://admin.icebang.site") +// .header("Referer", "https://admin.icebang.site/")) +// .andExpect(status().isOk()) +// .andExpect(jsonPath("$.success").value(true)) +// .andExpect(jsonPath("$.status").value("OK")) +// .andExpect(jsonPath("$.message").value("OK")) +// .andExpect(jsonPath("$.data").isArray()) +// .andDo( +// document( +// "organizations-list", +// preprocessRequest(prettyPrint()), +// preprocessResponse(prettyPrint()), +// resource( +// ResourceSnippetParameters.builder() +// .tag("Organization") +// .summary("조직 목록 조회") +// .description("시스템에 등록된 모든 조직의 목록을 조회합니다") +// .responseFields( +// fieldWithPath("success") +// .type(JsonFieldType.BOOLEAN) +// .description("요청 성공 여부"), +// +// fieldWithPath("data[]").type(JsonFieldType.ARRAY).description("조직 목록"), +// fieldWithPath("data[].id") +// .type(JsonFieldType.NUMBER) +// .description("조직 ID"), +// fieldWithPath("data[].organizationName") +// .type(JsonFieldType.STRING) +// .description("조직명"), +// fieldWithPath("message") +// .type(JsonFieldType.STRING) +// .description("응답 메시지"), +// fieldWithPath("status") +// .type(JsonFieldType.STRING) +// .description("HTTP 상태")) +// .build()))); +// } +// +// @Test +// @DisplayName("조직별 옵션 조회 성공") +// void getOrganizationOptions_success() throws Exception { +// mockMvc +// .perform( +// get(getApiUrlForDocs("/v0/organizations/{orgId}/options"), 1) +// .contentType(MediaType.APPLICATION_JSON) +// .header("Origin", "https://admin.icebang.site") +// .header("Referer", "https://admin.icebang.site/")) +// .andExpect(status().isOk()) +// .andExpect(jsonPath("$.success").value(true)) +// .andExpect(jsonPath("$.status").value("OK")) +// .andExpect(jsonPath("$.message").value("OK")) +// .andExpect(jsonPath("$.data.departments").isArray()) +// .andExpect(jsonPath("$.data.positions").isArray()) +// .andExpect(jsonPath("$.data.roles").isArray()) +// .andDo( +// document( +// "organization-options", +// preprocessRequest(prettyPrint()), +// 
preprocessResponse(prettyPrint()), +// resource( +// ResourceSnippetParameters.builder() +// .tag("Organization") +// .summary("조직별 옵션 조회") +// .description("특정 조직의 부서, 직급, 역할 정보를 조회합니다") +// .responseFields( +// fieldWithPath("success") +// .type(JsonFieldType.BOOLEAN) +// .description("요청 성공 여부"), +// fieldWithPath("data") +// .type(JsonFieldType.OBJECT) +// .description("조직 옵션 데이터"), +// fieldWithPath("data.departments[]") +// .type(JsonFieldType.ARRAY) +// .description("부서 목록"), +// fieldWithPath("data.departments[].id") +// .type(JsonFieldType.NUMBER) +// .description("부서 ID"), +// fieldWithPath("data.departments[].name") +// .type(JsonFieldType.STRING) +// .description("부서명"), +// fieldWithPath("data.positions[]") +// .type(JsonFieldType.ARRAY) +// .description("직급 목록"), +// fieldWithPath("data.positions[].id") +// .type(JsonFieldType.NUMBER) +// .description("직급 ID"), +// fieldWithPath("data.positions[].title") +// .type(JsonFieldType.STRING) +// .description("직급명"), +// fieldWithPath("data.roles[]") +// .type(JsonFieldType.ARRAY) +// .description("역할 목록"), +// fieldWithPath("data.roles[].id") +// .type(JsonFieldType.NUMBER) +// .description("역할 ID"), +// fieldWithPath("data.roles[].name") +// .type(JsonFieldType.STRING) +// .description("역할 코드명"), +// fieldWithPath("data.roles[].description") +// .type(JsonFieldType.STRING) +// .description("역할 설명"), +// fieldWithPath("message") +// .type(JsonFieldType.STRING) +// .description("응답 메시지"), +// fieldWithPath("status") +// .type(JsonFieldType.STRING) +// .description("HTTP 상태")) +// .build()))); +// } +// +// @Test +// @DisplayName("사용자 로그인 성공") +// void login_success() throws Exception { +// // given +// Map loginRequest = new HashMap<>(); +// loginRequest.put("email", "admin@icebang.site"); +// loginRequest.put("password", "qwer1234!A"); +// +// // MockMvc로 REST Docs + OpenAPI 생성 +// mockMvc +// .perform( +// post(getApiUrlForDocs("/v0/auth/login")) +// .contentType(MediaType.APPLICATION_JSON) +// .header("Origin", "https://admin.icebang.site") +// .header("Referer", "https://admin.icebang.site/") +// .content(objectMapper.writeValueAsString(loginRequest))) +// .andExpect(status().isOk()) +// .andExpect(jsonPath("$.success").value(true)) +// .andExpect(jsonPath("$.status").value("OK")) +// .andExpect(jsonPath("$.message").value("OK")) +// .andExpect(jsonPath("$.data").isEmpty()) +// .andDo( +// document( +// "auth-login", +// preprocessRequest(prettyPrint()), +// preprocessResponse(prettyPrint()), +// resource( +// ResourceSnippetParameters.builder() +// .tag("Authentication") +// .summary("사용자 로그인") +// .description("이메일과 비밀번호로 사용자 인증을 수행합니다") +// .requestFields( +// fieldWithPath("email") +// .type(JsonFieldType.STRING) +// .description("사용자 이메일 주소"), +// fieldWithPath("password") +// .type(JsonFieldType.STRING) +// .description("사용자 비밀번호")) +// .responseFields( +// fieldWithPath("success") +// .type(JsonFieldType.BOOLEAN) +// .description("요청 성공 여부"), +// fieldWithPath("data") +// .type(JsonFieldType.NULL) +// .description("응답 데이터 (로그인 성공 시 +// null)"), +// fieldWithPath("message") +// .type(JsonFieldType.STRING) +// .description("응답 메시지"), +// fieldWithPath("status") +// .type(JsonFieldType.STRING) +// .description("HTTP 상태")) +// .build()))); +// } +// +// @Test +// @DisplayName("사용자 회원가입 성공") +// void register_success() throws Exception { +// // given - 먼저 로그인하여 인증 토큰 획득 +// Map loginRequest = new HashMap<>(); +// loginRequest.put("email", "admin@icebang.site"); +// loginRequest.put("password", "qwer1234!A"); +// +// // 로그인 수행 
(실제 환경에서는 토큰을 헤더에 추가해야 할 수 있음) +// mockMvc +// .perform( +// post("/v0/auth/login") +// .contentType(MediaType.APPLICATION_JSON) +// .content(objectMapper.writeValueAsString(loginRequest))) +// .andExpect(status().isOk()); +// +// // 회원가입 요청 데이터 +// Map registerRequest = new HashMap<>(); +// registerRequest.put("name", "김철수"); +// registerRequest.put("email", "kim.chulsoo@example.com"); +// registerRequest.put("orgId", 1); +// registerRequest.put("deptId", 2); +// registerRequest.put("positionId", 5); +// registerRequest.put("roleIds", Arrays.asList(6, 7, 8)); +// registerRequest.put("password", null); +// +// // when & then +// mockMvc +// .perform( +// post(getApiUrlForDocs("/v0/auth/register")) +// .contentType(MediaType.APPLICATION_JSON) +// .header("Origin", "https://admin.icebang.site") +// .header("Referer", "https://admin.icebang.site/") +// .content(objectMapper.writeValueAsString(registerRequest))) +// .andExpect(status().isOk()) +// .andExpect(jsonPath("$.success").value(true)) +// .andExpect(jsonPath("$.status").value("OK")) +// .andExpect(jsonPath("$.message").value("OK")) +// .andDo( +// document( +// "auth-register", +// preprocessRequest(prettyPrint()), +// preprocessResponse(prettyPrint()), +// resource( +// ResourceSnippetParameters.builder() +// .tag("Authentication") +// .summary("사용자 회원가입") +// .description("새로운 사용자를 등록합니다. 관리자 로그인 후에만 사용 +// 가능합니다.") +// .requestFields( +// +// fieldWithPath("name").type(JsonFieldType.STRING).description("사용자 이름"), +// fieldWithPath("email") +// .type(JsonFieldType.STRING) +// .description("사용자 이메일 주소"), +// +// fieldWithPath("orgId").type(JsonFieldType.NUMBER).description("조직 ID"), +// +// fieldWithPath("deptId").type(JsonFieldType.NUMBER).description("부서 ID"), +// fieldWithPath("positionId") +// .type(JsonFieldType.NUMBER) +// .description("직급 ID"), +// fieldWithPath("roleIds[]") +// .type(JsonFieldType.ARRAY) +// .description("역할 ID 목록"), +// fieldWithPath("password") +// .type(JsonFieldType.NULL) +// .description("비밀번호 (null인 경우 시스템에서 +// 자동 생성)") +// .optional()) +// .responseFields( +// fieldWithPath("success") +// .type(JsonFieldType.BOOLEAN) +// .description("요청 성공 여부"), +// fieldWithPath("data") +// .type(JsonFieldType.VARIES) +// .description("응답 데이터 (회원가입 결과 +// 정보)"), +// fieldWithPath("message") +// .type(JsonFieldType.STRING) +// .description("응답 메시지"), +// fieldWithPath("status") +// .type(JsonFieldType.STRING) +// .description("HTTP 상태")) +// .build()))); +// } +// } diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/config/RestDocsConfiguration.java similarity index 95% rename from apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/integration/config/RestDocsConfiguration.java index bdacc10d..319860ad 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/config/RestDocsConfiguration.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/config/RestDocsConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config; +package com.gltkorea.icebang.integration.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java 
b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java new file mode 100644 index 00000000..9eb0b356 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java @@ -0,0 +1,13 @@ +package com.gltkorea.icebang.integration.support; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.restdocs.RestDocumentationExtension; +import org.springframework.test.web.servlet.MockMvc; + +import com.gltkorea.icebang.integration.annotation.IntegrationTest; + +@IntegrationTest +@ExtendWith(RestDocumentationExtension.class) +public abstract class IntegrationTestSupport { + protected MockMvc mockMvc; +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java deleted file mode 100644 index 36156a83..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/E2eTestSupport.java +++ /dev/null @@ -1,60 +0,0 @@ -package com.gltkorea.icebang.support; - -import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.documentationConfiguration; -import static org.springframework.restdocs.operation.preprocess.Preprocessors.prettyPrint; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.extension.ExtendWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.web.server.LocalServerPort; -import org.springframework.context.annotation.Import; -import org.springframework.restdocs.RestDocumentationContextProvider; -import org.springframework.restdocs.RestDocumentationExtension; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; -import org.springframework.web.context.WebApplicationContext; -import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; - -import com.gltkorea.icebang.annotation.E2eTest; -import com.gltkorea.icebang.config.E2eTestConfiguration; - -@Import(E2eTestConfiguration.class) -@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) -@ExtendWith(RestDocumentationExtension.class) -@E2eTest -public abstract class E2eTestSupport { - @Autowired protected ObjectMapper objectMapper; - - @LocalServerPort protected int port; - - @Autowired protected WebApplicationContext webApplicationContext; - - protected MockMvc mockMvc; - - @BeforeEach - void setUp(RestDocumentationContextProvider restDocumentation) { - // MockMvc 설정 (MockMvc 기반 테스트용) - this.mockMvc = - MockMvcBuilders.webAppContextSetup(webApplicationContext) - .apply( - documentationConfiguration(restDocumentation) - .operationPreprocessors() - .withRequestDefaults(prettyPrint()) - .withResponseDefaults(prettyPrint())) - .build(); - } - - protected String getBaseUrl() { - return "http://localhost:" + port; - } - - protected String getApiUrl(String path) { - return getBaseUrl() + path; - } - - /** REST Docs용 API URL 생성 (path parameter 포함) */ - protected String getApiUrlForDocs(String path) { - return path; - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java deleted file mode 100644 index 232a2c1f..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupportTest.java 
+++ /dev/null @@ -1,41 +0,0 @@ -package com.gltkorea.icebang.support; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - -import javax.sql.DataSource; - -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; - -class UnitTestSupportTest extends UnitTestSupport { - - @Autowired private DataSource dataSource; - - @Test - void shouldUseH2DatabaseWithMariaDBMode() throws SQLException { - try (Connection connection = dataSource.getConnection()) { - String url = connection.getMetaData().getURL(); - assertThat(url).contains("h2:mem:testdb"); - - // MariaDB 모드 확인 - Statement stmt = connection.createStatement(); - ResultSet rs = - stmt.executeQuery( - "SELECT SETTING_VALUE FROM INFORMATION_SCHEMA.SETTINGS WHERE SETTING_NAME = 'MODE'"); - if (rs.next()) { - assertThat(rs.getString(1)).isEqualTo("MariaDB"); - } - } - } - - @Test - void shouldLoadApplicationContext() { - // Spring Context 로딩 확인 - assertThat(dataSource).isNotNull(); - } -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/UnitTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/annotation/UnitTest.java similarity index 89% rename from apps/user-service/src/test/java/com/gltkorea/icebang/annotation/UnitTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/unit/annotation/UnitTest.java index 1927475a..117a5cb2 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/annotation/UnitTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/annotation/UnitTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.annotation; +package com.gltkorea.icebang.unit.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/support/UnitTestSupport.java similarity index 50% rename from apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupport.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/unit/support/UnitTestSupport.java index 88c4315e..be4c8660 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/support/UnitTestSupport.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/support/UnitTestSupport.java @@ -1,10 +1,7 @@ -package com.gltkorea.icebang.support; +package com.gltkorea.icebang.unit.support; -import org.springframework.boot.test.context.SpringBootTest; +import com.gltkorea.icebang.unit.annotation.UnitTest; -import com.gltkorea.icebang.annotation.UnitTest; - -@SpringBootTest @UnitTest public abstract class UnitTestSupport { From 571db090932eb9d0dce4f7203321c9ff2522b050 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Wed, 10 Sep 2025 14:33:49 +0900 Subject: [PATCH 17/31] =?UTF-8?q?Organization=20api=20test=20=EB=B0=8F=20i?= =?UTF-8?q?ntegration=20test=20=EC=84=B8=ED=8C=85=20(#61)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: rename UserServiceApplicationE2eTest Changes to be committed: renamed: src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java -> src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java * test: User login rest docs * chore: Test dir 구조화 * test: oranization api test * test: Integration test * chore: gradle allTests 
task 제거 * chore: Ci (java) test 수행 항목 조정 - main이 target인 경우 모든 test 수행 - 보통의 경우 unit, integration 수행 * fix: Main push 시 unit, integration test 수행 누락 * chore: echo로 test 수행 * chore: 실패 테스트 * chore: echo 비활성화 * chore: 실패 테스트 원복 --- .github/workflows/ci-java.yml | 7 +- apps/user-service/build.gradle | 23 +- .../email/service/MockEmailService.java | 2 +- .../application-test-integration.yml | 13 +- .../main/resources/application-test-unit.yml | 9 - .../src/main/resources/application.yml | 4 + .../e2e/scenario/ContextLoadE2eTests.java | 11 + .../scenario/UserRegistrationFlowE2eTest.java | 2 +- .../UserServiceApplicationE2eTests.java | 11 - .../e2e/{ => setup}/annotation/E2eTest.java | 2 +- .../config/E2eTestConfiguration.java | 2 +- .../{ => setup}/support/E2eTestSupport.java | 6 +- .../support/E2eTestSupportTest.java | 2 +- .../icebang/integration/auth/testa.java | 287 ------------------ .../annotation/IntegrationTest.java | 2 +- .../config/RestDocsConfiguration.java | 2 +- .../setup/support/IntegrationTestSupport.java | 35 +++ .../support/IntegrationTestSupport.java | 13 - .../tests/auth/AuthApiIntegrationTest.java | 81 +++++ .../OrganizationApiIntegrationTest.java | 170 +++++++++++ .../unit/{ => setup}/annotation/UnitTest.java | 2 +- .../{ => setup}/support/UnitTestSupport.java | 4 +- 22 files changed, 335 insertions(+), 355 deletions(-) create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java rename apps/user-service/src/test/java/com/gltkorea/icebang/e2e/{ => setup}/annotation/E2eTest.java (88%) rename apps/user-service/src/test/java/com/gltkorea/icebang/e2e/{ => setup}/config/E2eTestConfiguration.java (97%) rename apps/user-service/src/test/java/com/gltkorea/icebang/e2e/{ => setup}/support/E2eTestSupport.java (91%) rename apps/user-service/src/test/java/com/gltkorea/icebang/e2e/{ => setup}/support/E2eTestSupportTest.java (90%) delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java rename apps/user-service/src/test/java/com/gltkorea/icebang/integration/{ => setup}/annotation/IntegrationTest.java (87%) rename apps/user-service/src/test/java/com/gltkorea/icebang/integration/{ => setup}/config/RestDocsConfiguration.java (94%) create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java delete mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java create mode 100644 apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java rename apps/user-service/src/test/java/com/gltkorea/icebang/unit/{ => setup}/annotation/UnitTest.java (88%) rename apps/user-service/src/test/java/com/gltkorea/icebang/unit/{ => setup}/support/UnitTestSupport.java (60%) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 2505b59e..05006c2f 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -79,9 +79,12 @@ jobs: - name: Run Tests run: | if [ "${{ github.base_ref }}" == "main" ]; then - ./gradlew allTests + ./gradlew unitTest + ./gradlew integrationTest else - ./gradlew allTests + ./gradlew unitTest + ./gradlew integrationTest + ./gradlew e2eTest fi 
working-directory: apps/user-service diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 45abf367..c76b49f2 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -91,15 +91,26 @@ ext { snippetsDir = file('build/generated-snippets') } -tasks.named('test') { +tasks.register('unitTest', Test) { outputs.dir snippetsDir useJUnitPlatform { - // 기본적으로는 e2e 태그 제외하고 실행 - excludeTags 'e2e' + includeTags 'unit' } + systemProperty 'spring.profiles.active', 'test-unit' } +tasks.register('integrationTest', Test) { + outputs.dir snippetsDir + useJUnitPlatform { + includeTags 'integration' + } + + systemProperty 'spring.profiles.active', 'test-integration' + + timeout = Duration.ofMinutes(10) +} + // E2E 테스트 전용 task 추가 tasks.register('e2eTest', Test) { outputs.dir snippetsDir @@ -113,12 +124,6 @@ tasks.register('e2eTest', Test) { timeout = Duration.ofMinutes(10) } -// 모든 테스트 실행 task -tasks.register('allTests', Test) { - outputs.dir snippetsDir - useJUnitPlatform() -} - // AsciiDoctor 설정 (REST Docs 문서 생성) asciidoctor { inputs.dir snippetsDir diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java index 6ccaffc9..527bb752 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java @@ -8,7 +8,7 @@ import lombok.extern.slf4j.Slf4j; @Service -@Profile({"test-unit", "test-e2e", "local", "develop"}) +@Profile({"test-unit", "test-e2e", "test-integration", "local", "develop"}) @Slf4j public class MockEmailService implements EmailService { diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml index 95faf0f3..6625974a 100644 --- a/apps/user-service/src/main/resources/application-test-integration.yml +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -10,7 +10,7 @@ spring: password: driver-class-name: org.h2.Driver hikari: - connection-init-sql: "SET MODE MariaDB" + connection-init-sql: "SET MODE MariaDB; SET NON_KEYWORDS USER;" connection-timeout: 30000 idle-timeout: 600000 max-lifetime: 1800000 @@ -23,15 +23,6 @@ spring: console: enabled: true - # JPA 설정 (H2용) - jpa: - hibernate: - ddl-auto: create-drop - show-sql: true - properties: - hibernate: - dialect: org.hibernate.dialect.H2Dialect - # SQL 스크립트 초기화 설정 sql: init: @@ -48,4 +39,4 @@ mybatis: map-underscore-to-camel-case: true logging: - config: classpath:log4j2-test-unit.yml \ No newline at end of file + config: classpath:log4j2-develop.yml \ No newline at end of file diff --git a/apps/user-service/src/main/resources/application-test-unit.yml b/apps/user-service/src/main/resources/application-test-unit.yml index 4b36c77f..cd4e018f 100644 --- a/apps/user-service/src/main/resources/application-test-unit.yml +++ b/apps/user-service/src/main/resources/application-test-unit.yml @@ -24,15 +24,6 @@ spring: console: enabled: true - # JPA 설정 (H2용) - jpa: - hibernate: - ddl-auto: create-drop - show-sql: true - properties: - hibernate: - dialect: org.hibernate.dialect.H2Dialect - # SQL 스크립트 초기화 설정 sql: init: diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index e852951b..c8314375 100644 --- 
a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -3,6 +3,10 @@ spring: name: mvp profiles: active: develop + test: + context: + cache: + maxSize: 1 mybatis: # Mapper XML 파일 위치 mapper-locations: classpath:mapper/**/*.xml diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java new file mode 100644 index 00000000..ad6bfbf0 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java @@ -0,0 +1,11 @@ +package com.gltkorea.icebang.e2e.scenario; + +import org.junit.jupiter.api.Test; + +import com.gltkorea.icebang.e2e.setup.support.E2eTestSupport; + +class ContextLoadE2eTests extends E2eTestSupport { + + @Test + void contextLoads() {} +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java index f0fd3244..762d5ca4 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -13,7 +13,7 @@ import org.springframework.http.*; import org.springframework.test.context.jdbc.Sql; -import com.gltkorea.icebang.e2e.support.E2eTestSupport; +import com.gltkorea.icebang.e2e.setup.support.E2eTestSupport; @Sql( value = "classpath:sql/01-insert-internal-users.sql", diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java deleted file mode 100644 index 2379e450..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserServiceApplicationE2eTests.java +++ /dev/null @@ -1,11 +0,0 @@ -package com.gltkorea.icebang.e2e.scenario; - -import org.junit.jupiter.api.Test; - -import com.gltkorea.icebang.e2e.support.E2eTestSupport; - -class UserServiceApplicationE2eTests extends E2eTestSupport { - - @Test - void contextLoads() {} -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/annotation/E2eTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/annotation/E2eTest.java similarity index 88% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/annotation/E2eTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/annotation/E2eTest.java index 0840a996..0f087064 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/annotation/E2eTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/annotation/E2eTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.annotation; +package com.gltkorea.icebang.e2e.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/config/E2eTestConfiguration.java similarity index 97% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/config/E2eTestConfiguration.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/config/E2eTestConfiguration.java index 7ebe181d..4ee26803 100644 --- 
a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/config/E2eTestConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.config; +package com.gltkorea.icebang.e2e.setup.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.testcontainers.service.connection.ServiceConnection; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupport.java similarity index 91% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupport.java index 12a44848..b72ac031 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupport.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupport.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.support; +package com.gltkorea.icebang.e2e.setup.support; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -9,8 +9,8 @@ import org.springframework.web.context.WebApplicationContext; import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; -import com.gltkorea.icebang.e2e.annotation.E2eTest; -import com.gltkorea.icebang.e2e.config.E2eTestConfiguration; +import com.gltkorea.icebang.e2e.setup.annotation.E2eTest; +import com.gltkorea.icebang.e2e.setup.config.E2eTestConfiguration; @Import(E2eTestConfiguration.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupportTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupportTest.java similarity index 90% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupportTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupportTest.java index 7eccdd4e..33bfd4dc 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/support/E2eTestSupportTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupportTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.support; +package com.gltkorea.icebang.e2e.setup.support; import static org.assertj.core.api.Assertions.assertThat; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java deleted file mode 100644 index 9f273f56..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/auth/testa.java +++ /dev/null @@ -1,287 +0,0 @@ -// package com.gltkorea.icebang.e2e.scenario; -// -// import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; -// import static com.epages.restdocs.apispec.ResourceDocumentation.*; -// import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; -// import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; -// import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; -// import static org.springframework.restdocs.payload.PayloadDocumentation.*; -// 
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; -// -// import java.util.Arrays; -// import java.util.HashMap; -// import java.util.Map; -// -// import org.junit.jupiter.api.DisplayName; -// import org.junit.jupiter.api.Test; -// import org.springframework.http.*; -// import org.springframework.restdocs.payload.JsonFieldType; -// import org.springframework.test.annotation.DirtiesContext; -// import org.springframework.test.context.jdbc.Sql; -// -// import com.epages.restdocs.apispec.ResourceSnippetParameters; -// import com.gltkorea.icebang.e2e.support.E2eTestSupport; -// -// @Sql("classpath:sql/01-insert-internal-users.sql") -// @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) -// class UserRegistrationFlowE2eTest extends E2eTestSupport { -// -// @Test -// @DisplayName("조직 목록 조회 성공") -// void getOrganizations_success() throws Exception { -// mockMvc -// .perform( -// get(getApiUrlForDocs("/v0/organizations")) -// .contentType(MediaType.APPLICATION_JSON) -// .header("Origin", "https://admin.icebang.site") -// .header("Referer", "https://admin.icebang.site/")) -// .andExpect(status().isOk()) -// .andExpect(jsonPath("$.success").value(true)) -// .andExpect(jsonPath("$.status").value("OK")) -// .andExpect(jsonPath("$.message").value("OK")) -// .andExpect(jsonPath("$.data").isArray()) -// .andDo( -// document( -// "organizations-list", -// preprocessRequest(prettyPrint()), -// preprocessResponse(prettyPrint()), -// resource( -// ResourceSnippetParameters.builder() -// .tag("Organization") -// .summary("조직 목록 조회") -// .description("시스템에 등록된 모든 조직의 목록을 조회합니다") -// .responseFields( -// fieldWithPath("success") -// .type(JsonFieldType.BOOLEAN) -// .description("요청 성공 여부"), -// -// fieldWithPath("data[]").type(JsonFieldType.ARRAY).description("조직 목록"), -// fieldWithPath("data[].id") -// .type(JsonFieldType.NUMBER) -// .description("조직 ID"), -// fieldWithPath("data[].organizationName") -// .type(JsonFieldType.STRING) -// .description("조직명"), -// fieldWithPath("message") -// .type(JsonFieldType.STRING) -// .description("응답 메시지"), -// fieldWithPath("status") -// .type(JsonFieldType.STRING) -// .description("HTTP 상태")) -// .build()))); -// } -// -// @Test -// @DisplayName("조직별 옵션 조회 성공") -// void getOrganizationOptions_success() throws Exception { -// mockMvc -// .perform( -// get(getApiUrlForDocs("/v0/organizations/{orgId}/options"), 1) -// .contentType(MediaType.APPLICATION_JSON) -// .header("Origin", "https://admin.icebang.site") -// .header("Referer", "https://admin.icebang.site/")) -// .andExpect(status().isOk()) -// .andExpect(jsonPath("$.success").value(true)) -// .andExpect(jsonPath("$.status").value("OK")) -// .andExpect(jsonPath("$.message").value("OK")) -// .andExpect(jsonPath("$.data.departments").isArray()) -// .andExpect(jsonPath("$.data.positions").isArray()) -// .andExpect(jsonPath("$.data.roles").isArray()) -// .andDo( -// document( -// "organization-options", -// preprocessRequest(prettyPrint()), -// preprocessResponse(prettyPrint()), -// resource( -// ResourceSnippetParameters.builder() -// .tag("Organization") -// .summary("조직별 옵션 조회") -// .description("특정 조직의 부서, 직급, 역할 정보를 조회합니다") -// .responseFields( -// fieldWithPath("success") -// .type(JsonFieldType.BOOLEAN) -// .description("요청 성공 여부"), -// fieldWithPath("data") -// .type(JsonFieldType.OBJECT) -// .description("조직 옵션 데이터"), -// fieldWithPath("data.departments[]") -// .type(JsonFieldType.ARRAY) -// .description("부서 목록"), -// fieldWithPath("data.departments[].id") -// 
.type(JsonFieldType.NUMBER) -// .description("부서 ID"), -// fieldWithPath("data.departments[].name") -// .type(JsonFieldType.STRING) -// .description("부서명"), -// fieldWithPath("data.positions[]") -// .type(JsonFieldType.ARRAY) -// .description("직급 목록"), -// fieldWithPath("data.positions[].id") -// .type(JsonFieldType.NUMBER) -// .description("직급 ID"), -// fieldWithPath("data.positions[].title") -// .type(JsonFieldType.STRING) -// .description("직급명"), -// fieldWithPath("data.roles[]") -// .type(JsonFieldType.ARRAY) -// .description("역할 목록"), -// fieldWithPath("data.roles[].id") -// .type(JsonFieldType.NUMBER) -// .description("역할 ID"), -// fieldWithPath("data.roles[].name") -// .type(JsonFieldType.STRING) -// .description("역할 코드명"), -// fieldWithPath("data.roles[].description") -// .type(JsonFieldType.STRING) -// .description("역할 설명"), -// fieldWithPath("message") -// .type(JsonFieldType.STRING) -// .description("응답 메시지"), -// fieldWithPath("status") -// .type(JsonFieldType.STRING) -// .description("HTTP 상태")) -// .build()))); -// } -// -// @Test -// @DisplayName("사용자 로그인 성공") -// void login_success() throws Exception { -// // given -// Map loginRequest = new HashMap<>(); -// loginRequest.put("email", "admin@icebang.site"); -// loginRequest.put("password", "qwer1234!A"); -// -// // MockMvc로 REST Docs + OpenAPI 생성 -// mockMvc -// .perform( -// post(getApiUrlForDocs("/v0/auth/login")) -// .contentType(MediaType.APPLICATION_JSON) -// .header("Origin", "https://admin.icebang.site") -// .header("Referer", "https://admin.icebang.site/") -// .content(objectMapper.writeValueAsString(loginRequest))) -// .andExpect(status().isOk()) -// .andExpect(jsonPath("$.success").value(true)) -// .andExpect(jsonPath("$.status").value("OK")) -// .andExpect(jsonPath("$.message").value("OK")) -// .andExpect(jsonPath("$.data").isEmpty()) -// .andDo( -// document( -// "auth-login", -// preprocessRequest(prettyPrint()), -// preprocessResponse(prettyPrint()), -// resource( -// ResourceSnippetParameters.builder() -// .tag("Authentication") -// .summary("사용자 로그인") -// .description("이메일과 비밀번호로 사용자 인증을 수행합니다") -// .requestFields( -// fieldWithPath("email") -// .type(JsonFieldType.STRING) -// .description("사용자 이메일 주소"), -// fieldWithPath("password") -// .type(JsonFieldType.STRING) -// .description("사용자 비밀번호")) -// .responseFields( -// fieldWithPath("success") -// .type(JsonFieldType.BOOLEAN) -// .description("요청 성공 여부"), -// fieldWithPath("data") -// .type(JsonFieldType.NULL) -// .description("응답 데이터 (로그인 성공 시 -// null)"), -// fieldWithPath("message") -// .type(JsonFieldType.STRING) -// .description("응답 메시지"), -// fieldWithPath("status") -// .type(JsonFieldType.STRING) -// .description("HTTP 상태")) -// .build()))); -// } -// -// @Test -// @DisplayName("사용자 회원가입 성공") -// void register_success() throws Exception { -// // given - 먼저 로그인하여 인증 토큰 획득 -// Map loginRequest = new HashMap<>(); -// loginRequest.put("email", "admin@icebang.site"); -// loginRequest.put("password", "qwer1234!A"); -// -// // 로그인 수행 (실제 환경에서는 토큰을 헤더에 추가해야 할 수 있음) -// mockMvc -// .perform( -// post("/v0/auth/login") -// .contentType(MediaType.APPLICATION_JSON) -// .content(objectMapper.writeValueAsString(loginRequest))) -// .andExpect(status().isOk()); -// -// // 회원가입 요청 데이터 -// Map registerRequest = new HashMap<>(); -// registerRequest.put("name", "김철수"); -// registerRequest.put("email", "kim.chulsoo@example.com"); -// registerRequest.put("orgId", 1); -// registerRequest.put("deptId", 2); -// registerRequest.put("positionId", 5); -// 
registerRequest.put("roleIds", Arrays.asList(6, 7, 8)); -// registerRequest.put("password", null); -// -// // when & then -// mockMvc -// .perform( -// post(getApiUrlForDocs("/v0/auth/register")) -// .contentType(MediaType.APPLICATION_JSON) -// .header("Origin", "https://admin.icebang.site") -// .header("Referer", "https://admin.icebang.site/") -// .content(objectMapper.writeValueAsString(registerRequest))) -// .andExpect(status().isOk()) -// .andExpect(jsonPath("$.success").value(true)) -// .andExpect(jsonPath("$.status").value("OK")) -// .andExpect(jsonPath("$.message").value("OK")) -// .andDo( -// document( -// "auth-register", -// preprocessRequest(prettyPrint()), -// preprocessResponse(prettyPrint()), -// resource( -// ResourceSnippetParameters.builder() -// .tag("Authentication") -// .summary("사용자 회원가입") -// .description("새로운 사용자를 등록합니다. 관리자 로그인 후에만 사용 -// 가능합니다.") -// .requestFields( -// -// fieldWithPath("name").type(JsonFieldType.STRING).description("사용자 이름"), -// fieldWithPath("email") -// .type(JsonFieldType.STRING) -// .description("사용자 이메일 주소"), -// -// fieldWithPath("orgId").type(JsonFieldType.NUMBER).description("조직 ID"), -// -// fieldWithPath("deptId").type(JsonFieldType.NUMBER).description("부서 ID"), -// fieldWithPath("positionId") -// .type(JsonFieldType.NUMBER) -// .description("직급 ID"), -// fieldWithPath("roleIds[]") -// .type(JsonFieldType.ARRAY) -// .description("역할 ID 목록"), -// fieldWithPath("password") -// .type(JsonFieldType.NULL) -// .description("비밀번호 (null인 경우 시스템에서 -// 자동 생성)") -// .optional()) -// .responseFields( -// fieldWithPath("success") -// .type(JsonFieldType.BOOLEAN) -// .description("요청 성공 여부"), -// fieldWithPath("data") -// .type(JsonFieldType.VARIES) -// .description("응답 데이터 (회원가입 결과 -// 정보)"), -// fieldWithPath("message") -// .type(JsonFieldType.STRING) -// .description("응답 메시지"), -// fieldWithPath("status") -// .type(JsonFieldType.STRING) -// .description("HTTP 상태")) -// .build()))); -// } -// } diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/annotation/IntegrationTest.java similarity index 87% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/annotation/IntegrationTest.java index ca4e4046..ec111866 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/annotation/IntegrationTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/annotation/IntegrationTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.annotation; +package com.gltkorea.icebang.integration.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/config/RestDocsConfiguration.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/config/RestDocsConfiguration.java similarity index 94% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/config/RestDocsConfiguration.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/config/RestDocsConfiguration.java index 319860ad..eeb97ffc 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/config/RestDocsConfiguration.java +++ 
b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/config/RestDocsConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.config; +package com.gltkorea.icebang.integration.setup.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java new file mode 100644 index 00000000..037f37e5 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java @@ -0,0 +1,35 @@ +package com.gltkorea.icebang.integration.setup.support; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.restdocs.AutoConfigureRestDocs; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.web.server.LocalServerPort; +import org.springframework.context.annotation.Import; +import org.springframework.test.web.servlet.MockMvc; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.gltkorea.icebang.integration.setup.annotation.IntegrationTest; +import com.gltkorea.icebang.integration.setup.config.RestDocsConfiguration; + +@IntegrationTest +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@AutoConfigureMockMvc +@AutoConfigureRestDocs +@Import(RestDocsConfiguration.class) +public abstract class IntegrationTestSupport { + + @Autowired protected MockMvc mockMvc; + + @Autowired protected ObjectMapper objectMapper; + + @LocalServerPort protected int port; + + /** RestDocs에서 실제 API 호출 주소를 표기할 때 사용 */ + protected String getApiUrlForDocs(String path) { + if (path.startsWith("/")) { + return "http://localhost:" + port + path; + } + return "http://localhost:" + port + "/" + path; + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java deleted file mode 100644 index 9eb0b356..00000000 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/support/IntegrationTestSupport.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.gltkorea.icebang.integration.support; - -import org.junit.jupiter.api.extension.ExtendWith; -import org.springframework.restdocs.RestDocumentationExtension; -import org.springframework.test.web.servlet.MockMvc; - -import com.gltkorea.icebang.integration.annotation.IntegrationTest; - -@IntegrationTest -@ExtendWith(RestDocumentationExtension.class) -public abstract class IntegrationTestSupport { - protected MockMvc mockMvc; -} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java new file mode 100644 index 00000000..0d1e5d19 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java @@ -0,0 +1,81 @@ +package com.gltkorea.icebang.integration.tests.auth; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static 
com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; +import com.gltkorea.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = "classpath:sql/01-insert-internal-users.sql", + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +class AuthApiIntegrationTest extends IntegrationTestSupport { + @Test + @DisplayName("사용자 로그인 성공") + void login_success() throws Exception { + // given + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + // MockMvc로 REST Docs + OpenAPI 생성 + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/login")) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/") + .content(objectMapper.writeValueAsString(loginRequest))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isEmpty()) + .andDo( + document( + "auth-login", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("사용자 로그인") + .description("이메일과 비밀번호로 사용자 인증을 수행합니다") + .requestFields( + fieldWithPath("email") + .type(JsonFieldType.STRING) + .description("사용자 이메일 주소"), + fieldWithPath("password") + .type(JsonFieldType.STRING) + .description("사용자 비밀번호")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data") + .type(JsonFieldType.NULL) + .description("응답 데이터 (로그인 성공 시 null)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java new file mode 100644 index 00000000..5d458146 --- /dev/null +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java @@ -0,0 +1,170 @@ +package com.gltkorea.icebang.integration.tests.organization; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static 
org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; +import com.fasterxml.jackson.databind.JsonNode; +import com.gltkorea.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = { + "classpath:sql/01-insert-internal-users.sql", + "classpath:sql/02-insert-external-users.sql" + }, + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +class OrganizationApiIntegrationTest extends IntegrationTestSupport { + + @Test + @DisplayName("조직 목록 조회 성공") + void getOrganizations_success() throws Exception { + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/organizations")) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isArray()) + .andExpect(jsonPath("$.data[0].id").exists()) + .andExpect(jsonPath("$.data[0].organizationName").exists()) + .andDo( + document( + "organizations-list", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Organization") + .summary("조직 목록 조회") + .description("시스템에 등록된 모든 조직의 목록을 조회합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data[]").type(JsonFieldType.ARRAY).description("조직 목록"), + fieldWithPath("data[].id") + .type(JsonFieldType.NUMBER) + .description("조직 ID"), + fieldWithPath("data[].organizationName") + .type(JsonFieldType.STRING) + .description("조직명"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("조직 옵션 정보 조회 성공") + void getOrganizationOptions_success() throws Exception { + // given - 먼저 조직 목록을 조회해서 실제 존재하는 ID를 가져옴 + MvcResult organizationsResult = + mockMvc + .perform(get("/v0/organizations").contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andReturn(); + + String responseBody = organizationsResult.getResponse().getContentAsString(); + JsonNode jsonNode = objectMapper.readTree(responseBody); + JsonNode organizations = jsonNode.get("data"); + + // 첫 번째 조직의 ID를 가져옴 + Long organizationId = organizations.get(0).get("id").asLong(); + + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/organizations/{organizationId}/options"), organizationId) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data.departments").isArray()) + .andExpect(jsonPath("$.data.positions").isArray()) + 
.andExpect(jsonPath("$.data.roles").isArray()) + .andExpect(jsonPath("$.data.departments[0].id").exists()) + .andExpect(jsonPath("$.data.departments[0].name").exists()) + .andExpect(jsonPath("$.data.positions[0].id").exists()) + .andExpect(jsonPath("$.data.positions[0].title").exists()) + .andExpect(jsonPath("$.data.roles[0].id").exists()) + .andExpect(jsonPath("$.data.roles[0].name").exists()) + .andExpect(jsonPath("$.data.roles[0].description").exists()) + .andDo( + document( + "organizations-options", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Organization") + .summary("조직 옵션 정보 조회") + .description("특정 조직의 부서, 직급, 역할 옵션 정보를 조회합니다") + .pathParameters(parameterWithName("organizationId").description("조직 ID")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.OBJECT).description("옵션 데이터"), + fieldWithPath("data.departments[]") + .type(JsonFieldType.ARRAY) + .description("부서 목록"), + fieldWithPath("data.departments[].id") + .type(JsonFieldType.NUMBER) + .description("부서 ID"), + fieldWithPath("data.departments[].name") + .type(JsonFieldType.STRING) + .description("부서명"), + fieldWithPath("data.positions[]") + .type(JsonFieldType.ARRAY) + .description("직급 목록"), + fieldWithPath("data.positions[].id") + .type(JsonFieldType.NUMBER) + .description("직급 ID"), + fieldWithPath("data.positions[].title") + .type(JsonFieldType.STRING) + .description("직급명"), + fieldWithPath("data.roles[]") + .type(JsonFieldType.ARRAY) + .description("역할 목록"), + fieldWithPath("data.roles[].id") + .type(JsonFieldType.NUMBER) + .description("역할 ID"), + fieldWithPath("data.roles[].name") + .type(JsonFieldType.STRING) + .description("역할명"), + fieldWithPath("data.roles[].description") + .type(JsonFieldType.STRING) + .description("역할 설명"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } +} diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/annotation/UnitTest.java b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/annotation/UnitTest.java similarity index 88% rename from apps/user-service/src/test/java/com/gltkorea/icebang/unit/annotation/UnitTest.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/annotation/UnitTest.java index 117a5cb2..cb2f975c 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/annotation/UnitTest.java +++ b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/annotation/UnitTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.unit.annotation; +package com.gltkorea.icebang.unit.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/support/UnitTestSupport.java b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/support/UnitTestSupport.java similarity index 60% rename from apps/user-service/src/test/java/com/gltkorea/icebang/unit/support/UnitTestSupport.java rename to apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/support/UnitTestSupport.java index be4c8660..9bc71657 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/support/UnitTestSupport.java +++ 
b/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/support/UnitTestSupport.java @@ -1,6 +1,6 @@ -package com.gltkorea.icebang.unit.support; +package com.gltkorea.icebang.unit.setup.support; -import com.gltkorea.icebang.unit.annotation.UnitTest; +import com.gltkorea.icebang.unit.setup.annotation.UnitTest; @UnitTest public abstract class UnitTestSupport { From d6c1e00f8cb93327ec844ad26ca0d9c0f77e4601 Mon Sep 17 00:00:00 2001 From: Jihu Kim Date: Wed, 10 Sep 2025 15:45:31 +0900 Subject: [PATCH 18/31] FastAPI Ping Pong Test (#62) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feature: FastAPI Ping Test * fix: API url 수정 * refactor: Code Formatting * fix: PUBLIC Endpoints에 /ping 추가 --- apps/pre-processing-service/app/api/router.py | 2 +- .../health/api/HealthCheckController.java | 34 +++++++++++++++ .../common/health/service/FastApiClient.java | 42 +++++++++++++++++++ .../security/endpoints/SecurityEndpoints.java | 1 + 4 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java create mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py index 683f42a7..dce62c5c 100644 --- a/apps/pre-processing-service/app/api/router.py +++ b/apps/pre-processing-service/app/api/router.py @@ -17,7 +17,7 @@ #모듈 테스터를 위한 endpoint -> 추후 삭제 예정 api_router.include_router(test.router, prefix="/tests", tags=["Test"]) -@api_router.get("/") +@api_router.get("/ping") async def root(): return {"message": "서버 실행중입니다."} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java b/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java new file mode 100644 index 00000000..823272b2 --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java @@ -0,0 +1,34 @@ +package com.gltkorea.icebang.common.health.api; + +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +import com.gltkorea.icebang.common.health.service.FastApiClient; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor +public class HealthCheckController { + + private final FastApiClient fastApiClient; + + /** + * Spring Boot와 FastAPI 서버 간의 연결 상태를 확인하는 헬스 체크 API + * + * @return FastAPI 서버로부터의 응답 + */ + @GetMapping("/ping") + public ResponseEntity pingFastApi() { + String result = fastApiClient.ping(); + + if (result.startsWith("ERROR")) { + // FastAPI 연결 실패 시 503 Service Unavailable 상태 코드와 함께 에러 메시지 반환 + return ResponseEntity.status(503).body(result); + } + + // 성공 시 200 OK 상태 코드와 함께 FastAPI로부터 받은 응답("PONG" 등) 반환 + return ResponseEntity.ok(result); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java b/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java new file mode 100644 index 00000000..dac03786 --- /dev/null +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java @@ -0,0 +1,42 @@ +package com.gltkorea.icebang.common.health.service; + +import org.springframework.stereotype.Service; +import 
org.springframework.web.client.RestClientException; +import org.springframework.web.client.RestTemplate; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Service +@RequiredArgsConstructor +public class FastApiClient { + + // WebConfig에서 생성하고 타임아웃이 설정된 RestTemplate Bean을 주입받습니다. + private final RestTemplate restTemplate; + + // FastAPI 서버의 ping 엔드포인트 URL을 상수로 하드코딩합니다. + private static final String FASTAPI_PING_URL = "http://localhost:8000/ping"; + + /** + * FastAPI 서버의 /ping 엔드포인트를 호출하여 연결을 테스트합니다. + * + * @return 연결 성공 시 FastAPI로부터 받은 응답, 실패 시 에러 메시지 + */ + public String ping() { + log.info("Attempting to connect to FastAPI server at: {}", FASTAPI_PING_URL); + + try { + // FastAPI 서버에 GET 요청을 보내고, 응답을 String으로 받습니다. + // WebConfig에 설정된 5초 타임아웃이 여기서 적용됩니다. + String response = restTemplate.getForObject(FASTAPI_PING_URL, String.class); + log.info("Successfully received response from FastAPI: {}", response); + return response; + } catch (RestClientException e) { + // RestClientException은 연결 실패, 타임아웃 등 모든 통신 오류를 포함합니다. + log.error( + "Failed to connect to FastAPI server at {}. Error: {}", FASTAPI_PING_URL, e.getMessage()); + return "ERROR: Cannot connect to FastAPI"; + } + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java index da658775..5236edef 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java @@ -3,6 +3,7 @@ public enum SecurityEndpoints { PUBLIC( "/", + "/ping", "/v0/auth/login", "/api/public/**", "/health", From d28a981839941cc16162a2ee96ca8427a7fbef9b Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Wed, 10 Sep 2025 16:14:07 +0900 Subject: [PATCH 19/31] =?UTF-8?q?Spring=20=EB=94=94=EB=A0=89=ED=86=A0?= =?UTF-8?q?=EB=A6=AC=20=EA=B5=AC=EC=A1=B0=20=EA=B0=9C=EC=84=A0=20=EB=B0=8F?= =?UTF-8?q?=20glt=20Korea=20=EC=A0=9C=EA=B1=B0=20(#63)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: mapper 및 global위치 조정 * chore: GLT korea 제거 --- apps/user-service/build.gradle | 6 ++-- .../domain/email/service/EmailService.java | 7 ---- .../domain/user/service/UserService.java | 33 ------------------- .../com/gltkorea/icebang/entity/Users.java | 22 ------------- .../icebang/UserServiceApplication.java | 4 +-- .../batch/job/BlogContentJobConfig.java | 8 ++--- .../tasklet/ContentGenerationTasklet.java | 2 +- .../tasklet/KeywordExtractionTasklet.java | 2 +- .../icebang/common/dto/ApiResponse.java | 2 +- .../health/api/HealthCheckController.java | 6 ++-- .../common/health/service/FastApiClient.java | 2 +- .../common/utils/RandomPasswordGenerator.java | 2 +- .../auth/controller/AuthController.java | 14 ++++---- .../domain/auth/dto/LoginRequestDto.java | 2 +- .../icebang/domain/auth/dto/RegisterDto.java | 2 +- .../domain/auth}/mapper/AuthMapper.java | 6 ++-- .../domain/auth/model/AuthCredential.java | 2 +- .../auth/service/AuthCredentialAdapter.java | 8 ++--- .../domain/auth/service/AuthService.java | 14 ++++---- .../department/dto/DepartmentCardDo.java | 2 +- .../domain/email/dto/EmailRequest.java | 2 +- .../domain/email/service/EmailService.java | 7 ++++ .../email/service/EmailServiceImpl.java | 6 ++-- .../email/service/MockEmailService.java | 6 ++-- 
.../controller/OrganizationController.java | 12 +++---- .../organization/dto/OrganizationCardDto.java | 2 +- .../dto/OrganizationOptionDto.java | 10 +++--- .../mapper/OrganizationMapper.java | 10 +++--- .../service/OrganizationService.java | 16 ++++----- .../domain/position/dto/PositionCardDto.java | 2 +- .../icebang/domain/roles/dto/RoleCardDto.java | 2 +- .../schedule}/mapper/ScheduleMapper.java | 4 +-- .../domain/schedule/model/Schedule.java | 2 +- .../schedule/runner/SchedulerInitializer.java | 10 +++--- .../service/DynamicSchedulerService.java | 6 ++-- .../user/controller/UserController.java | 16 ++++----- .../domain/user/dto/CheckEmailRequest.java | 2 +- .../domain/user/dto/CheckEmailResponse.java | 2 +- .../user/dto/UserProfileResponseDto.java | 6 ++-- .../domain/user}/mapper/UserMapper.java | 2 +- .../domain/user/service/UserService.java | 21 ++++++++++++ .../global}/aop/logging/LoggingAspect.java | 8 ++--- .../icebang/global}/config/WebConfig.java | 2 +- .../typehandler/StringListTypeHandler.java | 2 +- .../config/scheduler/SchedulerConfig.java | 2 +- .../config/security/SecurityConfig.java | 8 ++--- .../security/endpoints/SecurityEndpoints.java | 2 +- .../icebang/global}/filter/LoggingFilter.java | 2 +- .../main/resources/application-develop.yml | 2 +- .../main/resources/application-test-e2e.yml | 2 +- .../application-test-integration.yml | 2 +- .../main/resources/application-test-unit.yml | 2 +- .../src/main/resources/application.yml | 2 +- .../src/main/resources/log4j2-develop.yml | 4 +-- .../src/main/resources/log4j2-production.yml | 2 +- .../src/main/resources/log4j2-test-unit.yml | 2 +- .../resources/mybatis/mapper/AuthMapper.xml | 10 +++--- .../mybatis/mapper/OrganizationMapper.xml | 10 +++--- .../mybatis/mapper/ScheduleMapper.xml | 8 ++--- .../icebang/TestUserServiceApplication.java | 2 +- .../icebang/TestcontainersConfiguration.java | 2 +- .../e2e/scenario/ContextLoadE2eTests.java | 4 +-- .../scenario/UserRegistrationFlowE2eTest.java | 4 +-- .../icebang/e2e/setup/annotation/E2eTest.java | 2 +- .../setup/config/E2eTestConfiguration.java | 2 +- .../e2e/setup/support/E2eTestSupport.java | 6 ++-- .../e2e/setup/support/E2eTestSupportTest.java | 2 +- .../setup/annotation/IntegrationTest.java | 2 +- .../setup/config/RestDocsConfiguration.java | 2 +- .../setup/support/IntegrationTestSupport.java | 7 ++-- .../tests/auth/AuthApiIntegrationTest.java | 5 +-- .../OrganizationApiIntegrationTest.java | 5 +-- .../unit/setup/annotation/UnitTest.java | 2 +- .../unit/setup/support/UnitTestSupport.java | 4 +-- 74 files changed, 192 insertions(+), 223 deletions(-) delete mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java delete mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java delete mode 100644 apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/UserServiceApplication.java (88%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/batch/job/BlogContentJobConfig.java (91%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/batch/tasklet/ContentGenerationTasklet.java (97%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/batch/tasklet/KeywordExtractionTasklet.java (97%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/common/dto/ApiResponse.java (96%) rename apps/user-service/src/main/java/{com/gltkorea => 
site}/icebang/common/health/api/HealthCheckController.java (89%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/common/health/service/FastApiClient.java (96%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/common/utils/RandomPasswordGenerator.java (97%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/auth/controller/AuthController.java (86%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/auth/dto/LoginRequestDto.java (89%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/auth/dto/RegisterDto.java (96%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/domain/auth}/mapper/AuthMapper.java (70%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/auth/model/AuthCredential.java (97%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/auth/service/AuthCredentialAdapter.java (82%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/auth/service/AuthService.java (81%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/department/dto/DepartmentCardDo.java (81%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/email/dto/EmailRequest.java (85%) create mode 100644 apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/email/service/EmailServiceImpl.java (77%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/email/service/MockEmailService.java (82%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/organization/controller/OrganizationController.java (75%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/organization/dto/OrganizationCardDto.java (81%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/organization/dto/OrganizationOptionDto.java (53%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/domain/organization}/mapper/OrganizationMapper.java (65%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/organization/service/OrganizationService.java (68%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/position/dto/PositionCardDto.java (84%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/roles/dto/RoleCardDto.java (86%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/domain/schedule}/mapper/ScheduleMapper.java (63%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/schedule/model/Schedule.java (81%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/schedule/runner/SchedulerInitializer.java (77%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/schedule/service/DynamicSchedulerService.java (95%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/user/controller/UserController.java (70%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/user/dto/CheckEmailRequest.java (88%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/user/dto/CheckEmailResponse.java (73%) rename apps/user-service/src/main/java/{com/gltkorea => site}/icebang/domain/user/dto/UserProfileResponseDto.java (87%) rename 
apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/domain/user}/mapper/UserMapper.java (87%) create mode 100644 apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/aop/logging/LoggingAspect.java (87%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/config/WebConfig.java (96%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/config/mybatis/typehandler/StringListTypeHandler.java (96%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/config/scheduler/SchedulerConfig.java (95%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/config/security/SecurityConfig.java (96%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/config/security/endpoints/SecurityEndpoints.java (94%) rename apps/user-service/src/main/java/{com/gltkorea/icebang => site/icebang/global}/filter/LoggingFilter.java (97%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/TestUserServiceApplication.java (90%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/TestcontainersConfiguration.java (83%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/e2e/scenario/ContextLoadE2eTests.java (54%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java (99%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/e2e/setup/annotation/E2eTest.java (88%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/e2e/setup/config/E2eTestConfiguration.java (97%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/e2e/setup/support/E2eTestSupport.java (91%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/e2e/setup/support/E2eTestSupportTest.java (90%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/integration/setup/annotation/IntegrationTest.java (87%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/integration/setup/config/RestDocsConfiguration.java (94%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/integration/setup/support/IntegrationTestSupport.java (84%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/integration/tests/auth/AuthApiIntegrationTest.java (96%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java (98%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/unit/setup/annotation/UnitTest.java (88%) rename apps/user-service/src/test/java/{com/gltkorea => site}/icebang/unit/setup/support/UnitTestSupport.java (60%) diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index c76b49f2..624067f6 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -7,9 +7,9 @@ plugins { id 'com.epages.restdocs-api-spec' version '0.18.2' } -group = 'com.gltkorea' +group = 'site.icebang' version = '0.0.1-alpha-SNAPSHOT' -description = 'GLT korea - fast campus team4 ice bang' +description = 'Ice bang - fast campus team4' java { toolchain { @@ -158,7 +158,7 @@ bootJar { spotless { java { googleJavaFormat('1.17.0') - importOrder('java', 'javax', 'org', 'com', '', 'com.movement') + importOrder('java', 'javax', 'org', 'com', '', 
'site.icebang') endWithNewline() removeUnusedImports() encoding('UTF-8') diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java deleted file mode 100644 index ac0b6663..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailService.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.gltkorea.icebang.domain.email.service; - -import com.gltkorea.icebang.domain.email.dto.EmailRequest; - -public interface EmailService { - void send(EmailRequest emailRequest); -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java b/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java deleted file mode 100644 index fcf87ac9..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/service/UserService.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.gltkorea.icebang.domain.user.service; - -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.user.dto.CheckEmailRequest; -import com.gltkorea.icebang.entity.Users; -import com.gltkorea.icebang.mapper.UserMapper; - -import jakarta.validation.Valid; -import lombok.RequiredArgsConstructor; - -@Service -@RequiredArgsConstructor -public class UserService { - private final UserMapper userMapper; - - public void registerUser(RegisterDto registerDto) { - Users user = - Users.builder() - .name(registerDto.getName()) - .email(registerDto.getEmail()) - .password(registerDto.getPassword()) - .status("PENDING") - .build(); - } - - @Transactional(readOnly = true) - public Boolean isExistEmail(@Valid CheckEmailRequest request) { - return userMapper.existsByEmail(request.getEmail()); - } -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java b/apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java deleted file mode 100644 index 44f30244..00000000 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/entity/Users.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.gltkorea.icebang.entity; - -import java.math.BigInteger; -import java.time.LocalDateTime; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder -@AllArgsConstructor -@Deprecated -public class Users { - private BigInteger id; - private String name; - private String email; - private String password; - private String status; - private LocalDateTime createdAt; - private LocalDateTime updatedAt; -} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/UserServiceApplication.java b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java similarity index 88% rename from apps/user-service/src/main/java/com/gltkorea/icebang/UserServiceApplication.java rename to apps/user-service/src/main/java/site/icebang/UserServiceApplication.java index 002a6bc4..68da9f2a 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/UserServiceApplication.java +++ b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang; +package site.icebang; import org.mybatis.spring.annotation.MapperScan; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; @@ -9,7 +9,7 @@ @EnableScheduling 
@EnableBatchProcessing @SpringBootApplication -@MapperScan("com.gltkorea.icebang.mapper") +@MapperScan("site.icebang.**.mapper") public class UserServiceApplication { public static void main(String[] args) { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/job/BlogContentJobConfig.java b/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java similarity index 91% rename from apps/user-service/src/main/java/com/gltkorea/icebang/batch/job/BlogContentJobConfig.java rename to apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java index 61626411..5e85fe9f 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/job/BlogContentJobConfig.java +++ b/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.batch.job; +package site.icebang.batch.job; import org.springframework.batch.core.Job; import org.springframework.batch.core.Step; @@ -9,11 +9,11 @@ import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; -import com.gltkorea.icebang.batch.tasklet.ContentGenerationTasklet; -import com.gltkorea.icebang.batch.tasklet.KeywordExtractionTasklet; - import lombok.RequiredArgsConstructor; +import site.icebang.batch.tasklet.ContentGenerationTasklet; +import site.icebang.batch.tasklet.KeywordExtractionTasklet; + @Configuration @RequiredArgsConstructor public class BlogContentJobConfig { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/ContentGenerationTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/ContentGenerationTasklet.java rename to apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java index 5cc8918a..a6ef4505 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/ContentGenerationTasklet.java +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.batch.tasklet; +package site.icebang.batch.tasklet; import java.util.List; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/KeywordExtractionTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/KeywordExtractionTasklet.java rename to apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java index 520403b3..ebc27117 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/batch/tasklet/KeywordExtractionTasklet.java +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.batch.tasklet; +package site.icebang.batch.tasklet; import java.util.List; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/dto/ApiResponse.java b/apps/user-service/src/main/java/site/icebang/common/dto/ApiResponse.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/common/dto/ApiResponse.java rename to apps/user-service/src/main/java/site/icebang/common/dto/ApiResponse.java index 7cf5edb3..0f99e59b 100644 --- 
a/apps/user-service/src/main/java/com/gltkorea/icebang/common/dto/ApiResponse.java +++ b/apps/user-service/src/main/java/site/icebang/common/dto/ApiResponse.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.common.dto; +package site.icebang.common.dto; import org.springframework.http.HttpStatus; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java b/apps/user-service/src/main/java/site/icebang/common/health/api/HealthCheckController.java similarity index 89% rename from apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java rename to apps/user-service/src/main/java/site/icebang/common/health/api/HealthCheckController.java index 823272b2..8b65e7a0 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/api/HealthCheckController.java +++ b/apps/user-service/src/main/java/site/icebang/common/health/api/HealthCheckController.java @@ -1,13 +1,13 @@ -package com.gltkorea.icebang.common.health.api; +package site.icebang.common.health.api; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RestController; -import com.gltkorea.icebang.common.health.service.FastApiClient; - import lombok.RequiredArgsConstructor; +import site.icebang.common.health.service.FastApiClient; + @RestController @RequiredArgsConstructor public class HealthCheckController { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java b/apps/user-service/src/main/java/site/icebang/common/health/service/FastApiClient.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java rename to apps/user-service/src/main/java/site/icebang/common/health/service/FastApiClient.java index dac03786..8d8ff496 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/common/health/service/FastApiClient.java +++ b/apps/user-service/src/main/java/site/icebang/common/health/service/FastApiClient.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.common.health.service; +package site.icebang.common.health.service; import org.springframework.stereotype.Service; import org.springframework.web.client.RestClientException; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/common/utils/RandomPasswordGenerator.java b/apps/user-service/src/main/java/site/icebang/common/utils/RandomPasswordGenerator.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/common/utils/RandomPasswordGenerator.java rename to apps/user-service/src/main/java/site/icebang/common/utils/RandomPasswordGenerator.java index 3716e5b6..c77189c2 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/common/utils/RandomPasswordGenerator.java +++ b/apps/user-service/src/main/java/site/icebang/common/utils/RandomPasswordGenerator.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.common.utils; +package site.icebang.common.utils; import java.security.SecureRandom; import java.util.Collections; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java similarity index 86% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java rename to 
apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java index a33503cf..d0a98142 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/controller/AuthController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.auth.controller; +package site.icebang.domain.auth.controller; import org.springframework.http.HttpStatus; import org.springframework.security.authentication.AuthenticationManager; @@ -9,17 +9,17 @@ import org.springframework.security.web.context.HttpSessionSecurityContextRepository; import org.springframework.web.bind.annotation.*; -import com.gltkorea.icebang.common.dto.ApiResponse; -import com.gltkorea.icebang.domain.auth.dto.LoginRequestDto; -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.auth.model.AuthCredential; -import com.gltkorea.icebang.domain.auth.service.AuthService; - import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpSession; import jakarta.validation.Valid; import lombok.RequiredArgsConstructor; +import site.icebang.common.dto.ApiResponse; +import site.icebang.domain.auth.dto.LoginRequestDto; +import site.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.model.AuthCredential; +import site.icebang.domain.auth.service.AuthService; + @RestController @RequestMapping("/v0/auth") @RequiredArgsConstructor diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/LoginRequestDto.java similarity index 89% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/dto/LoginRequestDto.java index 081d2016..f3b64e0b 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/LoginRequestDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/LoginRequestDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.auth.dto; +package site.icebang.domain.auth.dto; import jakarta.validation.constraints.Email; import jakarta.validation.constraints.NotBlank; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/RegisterDto.java b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/RegisterDto.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/RegisterDto.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/dto/RegisterDto.java index 1ff305aa..58cef092 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/dto/RegisterDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/dto/RegisterDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.auth.dto; +package site.icebang.domain.auth.dto; import java.math.BigInteger; import java.util.Set; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java b/apps/user-service/src/main/java/site/icebang/domain/auth/mapper/AuthMapper.java similarity index 70% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/mapper/AuthMapper.java index 4480daf0..ddc07ffe 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/AuthMapper.java +++ 
b/apps/user-service/src/main/java/site/icebang/domain/auth/mapper/AuthMapper.java @@ -1,9 +1,9 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.auth.mapper; import org.apache.ibatis.annotations.Mapper; -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.auth.model.AuthCredential; +import site.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.model.AuthCredential; @Mapper public interface AuthMapper { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java b/apps/user-service/src/main/java/site/icebang/domain/auth/model/AuthCredential.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/model/AuthCredential.java index ab4acc2e..22ef38f2 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/model/AuthCredential.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/model/AuthCredential.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.auth.model; +package site.icebang.domain.auth.model; import java.math.BigInteger; import java.util.ArrayList; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthCredentialAdapter.java similarity index 82% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthCredentialAdapter.java index e3268314..86498143 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthCredentialAdapter.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthCredentialAdapter.java @@ -1,15 +1,15 @@ -package com.gltkorea.icebang.domain.auth.service; +package site.icebang.domain.auth.service; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.core.userdetails.UsernameNotFoundException; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.auth.model.AuthCredential; -import com.gltkorea.icebang.mapper.AuthMapper; - import lombok.RequiredArgsConstructor; +import site.icebang.domain.auth.mapper.AuthMapper; +import site.icebang.domain.auth.model.AuthCredential; + @Service @RequiredArgsConstructor public class AuthCredentialAdapter implements UserDetailsService { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthService.java b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java similarity index 81% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthService.java rename to apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java index 18010ed5..091861b2 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/auth/service/AuthService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java @@ -1,17 +1,17 @@ -package com.gltkorea.icebang.domain.auth.service; +package site.icebang.domain.auth.service; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.stereotype.Service; import 
org.springframework.transaction.annotation.Transactional; -import com.gltkorea.icebang.common.utils.RandomPasswordGenerator; -import com.gltkorea.icebang.domain.auth.dto.RegisterDto; -import com.gltkorea.icebang.domain.email.dto.EmailRequest; -import com.gltkorea.icebang.domain.email.service.EmailService; -import com.gltkorea.icebang.mapper.AuthMapper; - import lombok.RequiredArgsConstructor; +import site.icebang.common.utils.RandomPasswordGenerator; +import site.icebang.domain.auth.dto.RegisterDto; +import site.icebang.domain.auth.mapper.AuthMapper; +import site.icebang.domain.email.dto.EmailRequest; +import site.icebang.domain.email.service.EmailService; + @Service @RequiredArgsConstructor @Transactional diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentCardDo.java b/apps/user-service/src/main/java/site/icebang/domain/department/dto/DepartmentCardDo.java similarity index 81% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentCardDo.java rename to apps/user-service/src/main/java/site/icebang/domain/department/dto/DepartmentCardDo.java index e891e966..7644eb8e 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/department/dto/DepartmentCardDo.java +++ b/apps/user-service/src/main/java/site/icebang/domain/department/dto/DepartmentCardDo.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.department.dto; +package site.icebang.domain.department.dto; import java.math.BigInteger; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/dto/EmailRequest.java b/apps/user-service/src/main/java/site/icebang/domain/email/dto/EmailRequest.java similarity index 85% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/dto/EmailRequest.java rename to apps/user-service/src/main/java/site/icebang/domain/email/dto/EmailRequest.java index fbd25749..89898055 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/dto/EmailRequest.java +++ b/apps/user-service/src/main/java/site/icebang/domain/email/dto/EmailRequest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.email.dto; +package site.icebang.domain.email.dto; import java.util.List; diff --git a/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java new file mode 100644 index 00000000..51646cc3 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailService.java @@ -0,0 +1,7 @@ +package site.icebang.domain.email.service; + +import site.icebang.domain.email.dto.EmailRequest; + +public interface EmailService { + void send(EmailRequest emailRequest); +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailServiceImpl.java b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailServiceImpl.java similarity index 77% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailServiceImpl.java rename to apps/user-service/src/main/java/site/icebang/domain/email/service/EmailServiceImpl.java index a992de13..82047271 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/EmailServiceImpl.java +++ b/apps/user-service/src/main/java/site/icebang/domain/email/service/EmailServiceImpl.java @@ -1,12 +1,12 @@ -package com.gltkorea.icebang.domain.email.service; +package site.icebang.domain.email.service; import 
org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.email.dto.EmailRequest; - import lombok.RequiredArgsConstructor; +import site.icebang.domain.email.dto.EmailRequest; + @Service @RequiredArgsConstructor @ConditionalOnMissingBean(EmailService.class) diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java b/apps/user-service/src/main/java/site/icebang/domain/email/service/MockEmailService.java similarity index 82% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java rename to apps/user-service/src/main/java/site/icebang/domain/email/service/MockEmailService.java index 527bb752..d4392fe5 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/email/service/MockEmailService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/email/service/MockEmailService.java @@ -1,12 +1,12 @@ -package com.gltkorea.icebang.domain.email.service; +package site.icebang.domain.email.service; import org.springframework.context.annotation.Profile; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.email.dto.EmailRequest; - import lombok.extern.slf4j.Slf4j; +import site.icebang.domain.email.dto.EmailRequest; + @Service @Profile({"test-unit", "test-e2e", "test-integration", "local", "develop"}) @Slf4j diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java b/apps/user-service/src/main/java/site/icebang/domain/organization/controller/OrganizationController.java similarity index 75% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java rename to apps/user-service/src/main/java/site/icebang/domain/organization/controller/OrganizationController.java index 7375fcbe..16ccbb65 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/controller/OrganizationController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/controller/OrganizationController.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.organization.controller; +package site.icebang.domain.organization.controller; import java.math.BigInteger; import java.util.List; @@ -9,13 +9,13 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; -import com.gltkorea.icebang.common.dto.ApiResponse; -import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionDto; -import com.gltkorea.icebang.domain.organization.service.OrganizationService; - import lombok.RequiredArgsConstructor; +import site.icebang.common.dto.ApiResponse; +import site.icebang.domain.organization.dto.OrganizationCardDto; +import site.icebang.domain.organization.dto.OrganizationOptionDto; +import site.icebang.domain.organization.service.OrganizationService; + @RequestMapping("/v0/organizations") @RequiredArgsConstructor @RestController diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationCardDto.java similarity index 81% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationCardDto.java rename to 
apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationCardDto.java index af0ef64b..a957adc0 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationCardDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.organization.dto; +package site.icebang.domain.organization.dto; import java.math.BigInteger; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionDto.java b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationOptionDto.java similarity index 53% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionDto.java rename to apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationOptionDto.java index d31534eb..d7e670eb 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/dto/OrganizationOptionDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/dto/OrganizationOptionDto.java @@ -1,15 +1,15 @@ -package com.gltkorea.icebang.domain.organization.dto; +package site.icebang.domain.organization.dto; import java.util.List; -import com.gltkorea.icebang.domain.department.dto.DepartmentCardDo; -import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RoleCardDto; - import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; +import site.icebang.domain.department.dto.DepartmentCardDo; +import site.icebang.domain.position.dto.PositionCardDto; +import site.icebang.domain.roles.dto.RoleCardDto; + @Builder @Data @AllArgsConstructor diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java b/apps/user-service/src/main/java/site/icebang/domain/organization/mapper/OrganizationMapper.java similarity index 65% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java rename to apps/user-service/src/main/java/site/icebang/domain/organization/mapper/OrganizationMapper.java index a7624bc6..ed504cca 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/OrganizationMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/mapper/OrganizationMapper.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.organization.mapper; import java.math.BigInteger; import java.util.List; @@ -6,10 +6,10 @@ import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; -import com.gltkorea.icebang.domain.department.dto.DepartmentCardDo; -import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RoleCardDto; +import site.icebang.domain.department.dto.DepartmentCardDo; +import site.icebang.domain.organization.dto.OrganizationCardDto; +import site.icebang.domain.position.dto.PositionCardDto; +import site.icebang.domain.roles.dto.RoleCardDto; @Mapper public interface OrganizationMapper { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java b/apps/user-service/src/main/java/site/icebang/domain/organization/service/OrganizationService.java similarity index 68% rename from 
apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java rename to apps/user-service/src/main/java/site/icebang/domain/organization/service/OrganizationService.java index 84bcc54c..cc035935 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/organization/service/OrganizationService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/organization/service/OrganizationService.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.organization.service; +package site.icebang.domain.organization.service; import java.math.BigInteger; import java.util.List; @@ -6,15 +6,15 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import com.gltkorea.icebang.domain.department.dto.DepartmentCardDo; -import com.gltkorea.icebang.domain.organization.dto.OrganizationCardDto; -import com.gltkorea.icebang.domain.organization.dto.OrganizationOptionDto; -import com.gltkorea.icebang.domain.position.dto.PositionCardDto; -import com.gltkorea.icebang.domain.roles.dto.RoleCardDto; -import com.gltkorea.icebang.mapper.OrganizationMapper; - import lombok.RequiredArgsConstructor; +import site.icebang.domain.department.dto.DepartmentCardDo; +import site.icebang.domain.organization.dto.OrganizationCardDto; +import site.icebang.domain.organization.dto.OrganizationOptionDto; +import site.icebang.domain.organization.mapper.OrganizationMapper; +import site.icebang.domain.position.dto.PositionCardDto; +import site.icebang.domain.roles.dto.RoleCardDto; + @Service @RequiredArgsConstructor public class OrganizationService { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/position/dto/PositionCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/position/dto/PositionCardDto.java similarity index 84% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/position/dto/PositionCardDto.java rename to apps/user-service/src/main/java/site/icebang/domain/position/dto/PositionCardDto.java index e97d7d3f..104b0cab 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/position/dto/PositionCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/position/dto/PositionCardDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.position.dto; +package site.icebang.domain.position.dto; import java.math.BigInteger; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RoleCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/roles/dto/RoleCardDto.java similarity index 86% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RoleCardDto.java rename to apps/user-service/src/main/java/site/icebang/domain/roles/dto/RoleCardDto.java index 5d468be5..737c8ed4 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/roles/dto/RoleCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/roles/dto/RoleCardDto.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.roles.dto; +package site.icebang.domain.roles.dto; import java.math.BigInteger; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/ScheduleMapper.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java similarity index 63% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/ScheduleMapper.java rename to 
apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java index 7220dc9e..c757fc36 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/ScheduleMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java @@ -1,10 +1,10 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.schedule.mapper; import java.util.List; import org.apache.ibatis.annotations.Mapper; -import com.gltkorea.icebang.domain.schedule.model.Schedule; +import site.icebang.domain.schedule.model.Schedule; @Mapper public interface ScheduleMapper { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java similarity index 81% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/model/Schedule.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java index b9400b88..65c48366 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/model/Schedule.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.schedule.model; +package site.icebang.domain.schedule.model; import lombok.Getter; import lombok.Setter; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/runner/SchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java similarity index 77% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/runner/SchedulerInitializer.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java index 7f96bba8..0dfb8b33 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/runner/SchedulerInitializer.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.schedule.runner; +package site.icebang.domain.schedule.runner; import java.util.List; @@ -6,13 +6,13 @@ import org.springframework.boot.ApplicationRunner; import org.springframework.stereotype.Component; -import com.gltkorea.icebang.domain.schedule.model.Schedule; -import com.gltkorea.icebang.domain.schedule.service.DynamicSchedulerService; -import com.gltkorea.icebang.mapper.ScheduleMapper; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import site.icebang.domain.schedule.mapper.ScheduleMapper; +import site.icebang.domain.schedule.model.Schedule; +import site.icebang.domain.schedule.service.DynamicSchedulerService; + @Slf4j @Component @RequiredArgsConstructor diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/service/DynamicSchedulerService.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java similarity index 95% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/service/DynamicSchedulerService.java rename to apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java index a8bbeff1..372e0e1d 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/schedule/service/DynamicSchedulerService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java @@ 
-1,4 +1,4 @@ -package com.gltkorea.icebang.domain.schedule.service; +package site.icebang.domain.schedule.service; import java.time.LocalDateTime; import java.util.Map; @@ -13,11 +13,11 @@ import org.springframework.scheduling.support.CronTrigger; import org.springframework.stereotype.Service; -import com.gltkorea.icebang.domain.schedule.model.Schedule; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import site.icebang.domain.schedule.model.Schedule; + @Slf4j @Service @RequiredArgsConstructor diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java b/apps/user-service/src/main/java/site/icebang/domain/user/controller/UserController.java similarity index 70% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java rename to apps/user-service/src/main/java/site/icebang/domain/user/controller/UserController.java index 534e9ba6..db9b3fcf 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/controller/UserController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/controller/UserController.java @@ -1,18 +1,18 @@ -package com.gltkorea.icebang.domain.user.controller; +package site.icebang.domain.user.controller; import org.springframework.security.core.annotation.AuthenticationPrincipal; import org.springframework.web.bind.annotation.*; -import com.gltkorea.icebang.common.dto.ApiResponse; -import com.gltkorea.icebang.domain.auth.model.AuthCredential; -import com.gltkorea.icebang.domain.user.dto.CheckEmailRequest; -import com.gltkorea.icebang.domain.user.dto.CheckEmailResponse; -import com.gltkorea.icebang.domain.user.dto.UserProfileResponseDto; -import com.gltkorea.icebang.domain.user.service.UserService; - import jakarta.validation.Valid; import lombok.RequiredArgsConstructor; +import site.icebang.common.dto.ApiResponse; +import site.icebang.domain.auth.model.AuthCredential; +import site.icebang.domain.user.dto.CheckEmailRequest; +import site.icebang.domain.user.dto.CheckEmailResponse; +import site.icebang.domain.user.dto.UserProfileResponseDto; +import site.icebang.domain.user.service.UserService; + @RestController @RequestMapping("/v0/users") @RequiredArgsConstructor diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailRequest.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java similarity index 88% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailRequest.java rename to apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java index 49208315..f3b2c2a1 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailRequest.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.user.dto; +package site.icebang.domain.user.dto; import jakarta.validation.constraints.Email; import jakarta.validation.constraints.NotBlank; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailResponse.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailResponse.java similarity index 73% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailResponse.java rename to apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailResponse.java index 8b92d187..adda35d4 100644 --- 
a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/CheckEmailResponse.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailResponse.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.domain.user.dto; +package site.icebang.domain.user.dto; import lombok.Builder; import lombok.Data; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/UserProfileResponseDto.java similarity index 87% rename from apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java rename to apps/user-service/src/main/java/site/icebang/domain/user/dto/UserProfileResponseDto.java index 9254ace7..6058f3aa 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/domain/user/dto/UserProfileResponseDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/UserProfileResponseDto.java @@ -1,12 +1,12 @@ -package com.gltkorea.icebang.domain.user.dto; +package site.icebang.domain.user.dto; import java.math.BigInteger; import java.util.List; -import com.gltkorea.icebang.domain.auth.model.AuthCredential; - import lombok.Getter; +import site.icebang.domain.auth.model.AuthCredential; + @Getter public class UserProfileResponseDto { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/UserMapper.java b/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java similarity index 87% rename from apps/user-service/src/main/java/com/gltkorea/icebang/mapper/UserMapper.java rename to apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java index 734fe8d5..d2e14012 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/mapper/UserMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.mapper; +package site.icebang.domain.user.mapper; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; diff --git a/apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java b/apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java new file mode 100644 index 00000000..e3dce655 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/user/service/UserService.java @@ -0,0 +1,21 @@ +package site.icebang.domain.user.service; + +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import jakarta.validation.Valid; +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.user.dto.CheckEmailRequest; +import site.icebang.domain.user.mapper.UserMapper; + +@Service +@RequiredArgsConstructor +public class UserService { + private final UserMapper userMapper; + + @Transactional(readOnly = true) + public Boolean isExistEmail(@Valid CheckEmailRequest request) { + return userMapper.existsByEmail(request.getEmail()); + } +} diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/aop/logging/LoggingAspect.java b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java similarity index 87% rename from apps/user-service/src/main/java/com/gltkorea/icebang/aop/logging/LoggingAspect.java rename to apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java index 0441820d..126c7d35 100644 --- 
a/apps/user-service/src/main/java/com/gltkorea/icebang/aop/logging/LoggingAspect.java +++ b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.aop.logging; +package site.icebang.global.aop.logging; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; @@ -13,13 +13,13 @@ @Component public class LoggingAspect { - @Pointcut("execution(public * com.gltkorea.icebang..controller..*(..))") + @Pointcut("execution(public * site.icebang..controller..*(..))") public void controllerMethods() {} - @Pointcut("execution(public * com.gltkorea.icebang..service..*(..))") + @Pointcut("execution(public * site.icebang..service..*(..))") public void serviceMethods() {} - @Pointcut("execution(public * com.gltkorea.icebang..service..repository..*(..))") + @Pointcut("execution(public * site.icebang..service..mapper..*(..))") public void repositoryMethods() {} @Around("controllerMethods()") diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/WebConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/WebConfig.java rename to apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java index 1ed10098..22fd4be8 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/WebConfig.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config; +package site.icebang.global.config; import java.time.Duration; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/StringListTypeHandler.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java rename to apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/StringListTypeHandler.java index 4363124c..6aba5d96 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/mybatis/typehandler/StringListTypeHandler.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/StringListTypeHandler.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config.mybatis.typehandler; +package site.icebang.global.config.mybatis.typehandler; import java.sql.CallableStatement; import java.sql.PreparedStatement; diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/scheduler/SchedulerConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java similarity index 95% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/scheduler/SchedulerConfig.java rename to apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java index 592eb0d7..79fc6436 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/scheduler/SchedulerConfig.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config.scheduler; +package site.icebang.global.config.scheduler; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; diff --git 
a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java similarity index 96% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java rename to apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java index bde09f6e..457e388d 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/SecurityConfig.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config.security; +package site.icebang.global.config.security; import java.security.SecureRandom; @@ -19,11 +19,11 @@ import org.springframework.web.cors.UrlBasedCorsConfigurationSource; import org.springframework.web.filter.CorsFilter; -import com.gltkorea.icebang.config.security.endpoints.SecurityEndpoints; -import com.gltkorea.icebang.domain.auth.service.AuthCredentialAdapter; - import lombok.RequiredArgsConstructor; +import site.icebang.domain.auth.service.AuthCredentialAdapter; +import site.icebang.global.config.security.endpoints.SecurityEndpoints; + @Configuration @RequiredArgsConstructor public class SecurityConfig { diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java similarity index 94% rename from apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java rename to apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java index 5236edef..019337dc 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.config.security.endpoints; +package site.icebang.global.config.security.endpoints; public enum SecurityEndpoints { PUBLIC( diff --git a/apps/user-service/src/main/java/com/gltkorea/icebang/filter/LoggingFilter.java b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java similarity index 97% rename from apps/user-service/src/main/java/com/gltkorea/icebang/filter/LoggingFilter.java rename to apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java index e8dda321..e89f2d80 100644 --- a/apps/user-service/src/main/java/com/gltkorea/icebang/filter/LoggingFilter.java +++ b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.filter; +package site.icebang.global.filter; import java.io.IOException; import java.util.UUID; diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index 6d9a8ea3..e7bc3f09 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -37,7 +37,7 @@ spring: mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml 
b/apps/user-service/src/main/resources/application-test-e2e.yml index 8759b298..f7dceba9 100644 --- a/apps/user-service/src/main/resources/application-test-e2e.yml +++ b/apps/user-service/src/main/resources/application-test-e2e.yml @@ -13,7 +13,7 @@ spring: mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml index 6625974a..0ed34f36 100644 --- a/apps/user-service/src/main/resources/application-test-integration.yml +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -34,7 +34,7 @@ spring: mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application-test-unit.yml b/apps/user-service/src/main/resources/application-test-unit.yml index cd4e018f..d9a8059b 100644 --- a/apps/user-service/src/main/resources/application-test-unit.yml +++ b/apps/user-service/src/main/resources/application-test-unit.yml @@ -35,7 +35,7 @@ spring: mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml - type-aliases-package: com.gltkorea.icebang.dto + type-aliases-package: site.icebang.dto configuration: map-underscore-to-camel-case: true diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index c8314375..d0357684 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -10,4 +10,4 @@ spring: mybatis: # Mapper XML 파일 위치 mapper-locations: classpath:mapper/**/*.xml - type-handlers-package: com.gltkorea.icebang.config.mybatis.typehandler \ No newline at end of file + type-handlers-package: site.icebang.config.mybatis.typehandler \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index 1b5c6e35..f900c3b1 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -89,7 +89,7 @@ Configuration: - ref: file-error-appender # 2. 애플리케이션 로그 - - name: com.gltkorea.icebang + - name: site.icebang additivity: "false" level: TRACE AppenderRef: @@ -125,7 +125,7 @@ Configuration: - ref: console-appender - ref: file-info-appender - - name: com.gltkorea.icebang.domain.auth.mapper + - name: site.icebang.domain.auth.mapper level: DEBUG additivity: "false" AppenderRef: diff --git a/apps/user-service/src/main/resources/log4j2-production.yml b/apps/user-service/src/main/resources/log4j2-production.yml index d1afc02b..31393458 100644 --- a/apps/user-service/src/main/resources/log4j2-production.yml +++ b/apps/user-service/src/main/resources/log4j2-production.yml @@ -89,7 +89,7 @@ Configuration: - ref: file-error-appender # 2. 
애플리케이션 로그 - - name: com.gltkorea.icebang + - name: site.icebang additivity: "false" level: TRACE AppenderRef: diff --git a/apps/user-service/src/main/resources/log4j2-test-unit.yml b/apps/user-service/src/main/resources/log4j2-test-unit.yml index 80df15cd..ef740431 100644 --- a/apps/user-service/src/main/resources/log4j2-test-unit.yml +++ b/apps/user-service/src/main/resources/log4j2-test-unit.yml @@ -38,7 +38,7 @@ Configuration: - ref: console-appender # 2. 애플리케이션 로그 - - name: com.gltkorea.icebang + - name: site.icebang additivity: "false" level: INFO AppenderRef: diff --git a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml index 154dbb39..d98c7299 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/AuthMapper.xml @@ -2,7 +2,7 @@ - + - SELECT u.id, u.email, @@ -30,17 +30,17 @@ LIMIT 1 - + INSERT INTO user (name, email, password) VALUES (#{name}, #{email}, #{password}); - + INSERT INTO user_organization (user_id, organization_id, department_id, position_id, status) VALUES (#{id}, #{orgId}, #{deptId}, #{positionId}, #{status}); - + INSERT INTO user_role (user_organization_id, role_id) VALUES diff --git a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml index 740b81a3..40abe4d5 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/OrganizationMapper.xml @@ -2,10 +2,10 @@ - + SELECT + diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/TestUserServiceApplication.java b/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java similarity index 90% rename from apps/user-service/src/test/java/com/gltkorea/icebang/TestUserServiceApplication.java rename to apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java index f53fa0a9..ba8c2403 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/TestUserServiceApplication.java +++ b/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang; +package site.icebang; import org.springframework.boot.SpringApplication; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/TestcontainersConfiguration.java b/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java similarity index 83% rename from apps/user-service/src/test/java/com/gltkorea/icebang/TestcontainersConfiguration.java rename to apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java index bbe8ed02..b9eb7b76 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/TestcontainersConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang; +package site.icebang; import org.springframework.boot.test.context.TestConfiguration; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/ContextLoadE2eTests.java similarity index 54% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java rename to apps/user-service/src/test/java/site/icebang/e2e/scenario/ContextLoadE2eTests.java index ad6bfbf0..29e5857c 100644 --- 
a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/ContextLoadE2eTests.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/ContextLoadE2eTests.java @@ -1,8 +1,8 @@ -package com.gltkorea.icebang.e2e.scenario; +package site.icebang.e2e.scenario; import org.junit.jupiter.api.Test; -import com.gltkorea.icebang.e2e.setup.support.E2eTestSupport; +import site.icebang.e2e.setup.support.E2eTestSupport; class ContextLoadE2eTests extends E2eTestSupport { diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java similarity index 99% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java rename to apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java index 762d5ca4..a873d2d5 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.scenario; +package site.icebang.e2e.scenario; import static org.assertj.core.api.Assertions.*; @@ -13,7 +13,7 @@ import org.springframework.http.*; import org.springframework.test.context.jdbc.Sql; -import com.gltkorea.icebang.e2e.setup.support.E2eTestSupport; +import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( value = "classpath:sql/01-insert-internal-users.sql", diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/annotation/E2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/annotation/E2eTest.java similarity index 88% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/annotation/E2eTest.java rename to apps/user-service/src/test/java/site/icebang/e2e/setup/annotation/E2eTest.java index 0f087064..e7d3ef09 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/annotation/E2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/annotation/E2eTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.setup.annotation; +package site.icebang.e2e.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java similarity index 97% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/config/E2eTestConfiguration.java rename to apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java index 4ee26803..4976d0b8 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.setup.config; +package site.icebang.e2e.setup.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.testcontainers.service.connection.ServiceConnection; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupport.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java similarity index 91% rename from 
apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupport.java rename to apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java index b72ac031..c2d10870 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupport.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.setup.support; +package site.icebang.e2e.setup.support; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -9,8 +9,8 @@ import org.springframework.web.context.WebApplicationContext; import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; -import com.gltkorea.icebang.e2e.setup.annotation.E2eTest; -import com.gltkorea.icebang.e2e.setup.config.E2eTestConfiguration; +import site.icebang.e2e.setup.annotation.E2eTest; +import site.icebang.e2e.setup.config.E2eTestConfiguration; @Import(E2eTestConfiguration.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupportTest.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupportTest.java similarity index 90% rename from apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupportTest.java rename to apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupportTest.java index 33bfd4dc..f9fe164e 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/e2e/setup/support/E2eTestSupportTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupportTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.e2e.setup.support; +package site.icebang.e2e.setup.support; import static org.assertj.core.api.Assertions.assertThat; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/annotation/IntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/setup/annotation/IntegrationTest.java similarity index 87% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/annotation/IntegrationTest.java rename to apps/user-service/src/test/java/site/icebang/integration/setup/annotation/IntegrationTest.java index ec111866..77dfddf9 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/annotation/IntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/setup/annotation/IntegrationTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.setup.annotation; +package site.icebang.integration.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/config/RestDocsConfiguration.java b/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java similarity index 94% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/config/RestDocsConfiguration.java rename to apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java index eeb97ffc..f60de9cc 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/config/RestDocsConfiguration.java +++ 
b/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.setup.config; +package site.icebang.integration.setup.config; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java b/apps/user-service/src/test/java/site/icebang/integration/setup/support/IntegrationTestSupport.java similarity index 84% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java rename to apps/user-service/src/test/java/site/icebang/integration/setup/support/IntegrationTestSupport.java index 037f37e5..ca28cd37 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/setup/support/IntegrationTestSupport.java +++ b/apps/user-service/src/test/java/site/icebang/integration/setup/support/IntegrationTestSupport.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.setup.support; +package site.icebang.integration.setup.support; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.restdocs.AutoConfigureRestDocs; @@ -9,8 +9,9 @@ import org.springframework.test.web.servlet.MockMvc; import com.fasterxml.jackson.databind.ObjectMapper; -import com.gltkorea.icebang.integration.setup.annotation.IntegrationTest; -import com.gltkorea.icebang.integration.setup.config.RestDocsConfiguration; + +import site.icebang.integration.setup.annotation.IntegrationTest; +import site.icebang.integration.setup.config.RestDocsConfiguration; @IntegrationTest @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java similarity index 96% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java rename to apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java index 0d1e5d19..5c538105 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/auth/AuthApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.tests.auth; +package site.icebang.integration.tests.auth; import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; import static com.epages.restdocs.apispec.ResourceDocumentation.*; @@ -18,7 +18,8 @@ import org.springframework.transaction.annotation.Transactional; import com.epages.restdocs.apispec.ResourceSnippetParameters; -import com.gltkorea.icebang.integration.setup.support.IntegrationTestSupport; + +import site.icebang.integration.setup.support.IntegrationTestSupport; @Sql( value = "classpath:sql/01-insert-internal-users.sql", diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java similarity index 98% rename from apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java 
rename to apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java index 5d458146..666a8ea5 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.integration.tests.organization; +package site.icebang.integration.tests.organization; import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; import static com.epages.restdocs.apispec.ResourceDocumentation.*; @@ -17,7 +17,8 @@ import com.epages.restdocs.apispec.ResourceSnippetParameters; import com.fasterxml.jackson.databind.JsonNode; -import com.gltkorea.icebang.integration.setup.support.IntegrationTestSupport; + +import site.icebang.integration.setup.support.IntegrationTestSupport; @Sql( value = { diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/annotation/UnitTest.java b/apps/user-service/src/test/java/site/icebang/unit/setup/annotation/UnitTest.java similarity index 88% rename from apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/annotation/UnitTest.java rename to apps/user-service/src/test/java/site/icebang/unit/setup/annotation/UnitTest.java index cb2f975c..65afc91b 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/annotation/UnitTest.java +++ b/apps/user-service/src/test/java/site/icebang/unit/setup/annotation/UnitTest.java @@ -1,4 +1,4 @@ -package com.gltkorea.icebang.unit.setup.annotation; +package site.icebang.unit.setup.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/support/UnitTestSupport.java b/apps/user-service/src/test/java/site/icebang/unit/setup/support/UnitTestSupport.java similarity index 60% rename from apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/support/UnitTestSupport.java rename to apps/user-service/src/test/java/site/icebang/unit/setup/support/UnitTestSupport.java index 9bc71657..3977703c 100644 --- a/apps/user-service/src/test/java/com/gltkorea/icebang/unit/setup/support/UnitTestSupport.java +++ b/apps/user-service/src/test/java/site/icebang/unit/setup/support/UnitTestSupport.java @@ -1,6 +1,6 @@ -package com.gltkorea.icebang.unit.setup.support; +package site.icebang.unit.setup.support; -import com.gltkorea.icebang.unit.setup.annotation.UnitTest; +import site.icebang.unit.setup.annotation.UnitTest; @UnitTest public abstract class UnitTestSupport { From fdd24dc97cdf17e9cb13d743d6a50dbb4597b4bf Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 10 Sep 2025 16:14:40 +0900 Subject: [PATCH 20/31] =?UTF-8?q?chore:=20=EC=A4=91=EB=B3=B5=20=EC=9D=B8?= =?UTF-8?q?=EB=8D=B1=EC=8A=A4=20=EC=A0=9C=EA=B1=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/user-service/src/main/resources/sql/01-schema.sql | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql index 138425fc..569c452a 100644 --- a/apps/user-service/src/main/resources/sql/01-schema.sql +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -284,6 +284,4 @@ CREATE TABLE `task_run` ( INDEX `idx_task_run_job_run_id` (`job_run_id`), INDEX `idx_task_run_status` 
(`status`), INDEX `idx_task_run_task_id` (`task_id`) - ); - -CREATE INDEX `idx_task_io_data_task_run_id` ON `task_io_data` (`task_run_id`); \ No newline at end of file + ); \ No newline at end of file From 4bf5f56615ba90c89632f7f05b34828c689e0696 Mon Sep 17 00:00:00 2001 From: JiHoon Date: Wed, 10 Sep 2025 16:16:48 +0900 Subject: [PATCH 21/31] =?UTF-8?q?feat=20:=20Mariadb=20Manager=20=EC=9E=91?= =?UTF-8?q?=EC=84=B1=20=EB=B0=8F=20pytest=20=EC=9E=91=EC=84=B1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/db/MariadbManager.py | 0 .../app/db/mariadb_manager.py | 149 +++ .../app/test/test_mariadb_connection.py | 148 +++ apps/pre-processing-service/poetry.lock | 846 +++--------------- apps/pre-processing-service/pyproject.toml | 8 +- 5 files changed, 440 insertions(+), 711 deletions(-) delete mode 100644 apps/pre-processing-service/app/db/MariadbManager.py create mode 100644 apps/pre-processing-service/app/db/mariadb_manager.py create mode 100644 apps/pre-processing-service/app/test/test_mariadb_connection.py diff --git a/apps/pre-processing-service/app/db/MariadbManager.py b/apps/pre-processing-service/app/db/MariadbManager.py deleted file mode 100644 index e69de29b..00000000 diff --git a/apps/pre-processing-service/app/db/mariadb_manager.py b/apps/pre-processing-service/app/db/mariadb_manager.py new file mode 100644 index 00000000..225de471 --- /dev/null +++ b/apps/pre-processing-service/app/db/mariadb_manager.py @@ -0,0 +1,149 @@ +import pymysql +import os +import threading + +from contextlib import contextmanager +from dotenv import load_dotenv +from dbutils.pooled_db import PooledDB + +class MariadbManager: + """ + MariaDB 매니저 클래스 + 1. MariaDB 데이터베이스 연결 및 관리 + 2. 커넥션 풀링 지원 + 3. 커서 및 커넥션 컨텍스트 매니저 제공 + """ + + _instance = None + _lock = threading.Lock() + load_dotenv() + + def __new__(cls): + """ + 싱글톤 패턴 구현 + 스레드 안전성을 위해 Lock 사용 + Double-checked locking 적용 + """ + + if cls._instance is None: + with cls._lock: + if cls._instance is None: + cls._instance = super(MariadbManager, cls).__new__(cls) + cls._instance._initialized = False + return cls._instance + + def __init__(self): + """ + MariaDB 매니저 초기화 + 데이터베이스 연결 설정 + 환경 변수에서 데이터베이스 설정 로드 및 검증 + """ + + if self._initialized: + return + + self._config = { + 'host': os.getenv('DB_HOST', 'localhost'), + 'port': int(os.getenv('DB_PORT', '3306')), + 'database': os.getenv('DB_NAME', 'pre_process'), + 'user': os.getenv('DB_USER', 'mariadb'), + 'password': os.getenv('DB_PASSWORD', 'qwer1234'), + 'autocommit': False + } + + required_keys = ['host', 'database', 'user', 'password'] + missing = [k for k, v in self._config.items() + if k in required_keys and (v is None or v == '')] + if missing: + raise ValueError(f"필수 데이터베이스 설정이 누락되었습니다: {missing}") + + self._pool = None + self._initialized = True + + def _init_pool(self, pool_size=20): + """ + MariaDB 전용 커넥션 풀 초기화 + :param pool_size: 풀 크기 + """ + + if self._pool is None: + config = {**self._config} + try: + self._pool = PooledDB( + creator=pymysql, + maxconnections=pool_size, + mincached=2, + maxcached=5, + maxshared=3, + blocking=True, + maxusage=None, + setsession=[], + ping=0, + **config + ) + except pymysql.Error as e: + raise Exception(f"MariaDB 커넥션 풀 초기화 실패: {e}") + + @contextmanager + def get_cursor(self): + """ + 커서 컨텍스트 매니저 - 일반적인 쿼리용 + :return: 커서 객체 + """ + + if self._pool is None: + self._init_pool() + + try: + conn = self._pool.connection() + except Exception as e: + raise Exception(f"커넥션 풀에서 연결 획득 실패: {e}") + + cursor = None + try: + 
cursor = conn.cursor() + yield cursor + conn.commit() + except Exception as e: + if conn: + conn.rollback() + raise e + finally: + if cursor: + cursor.close() + if conn: + conn.close() + + @contextmanager + def get_connection(self): + """ + 커넥션 컨텍스트 매니저 + :return: 커넥션 객체 + """ + + if self._pool is None: + self._init_pool() + + try: + conn = self._pool.connection() + except Exception as e: + raise Exception(f"커넥션 풀에서 연결 획득 실패: {e}") + + try: + yield conn + conn.commit() + except Exception as e: + if conn: + conn.rollback() + raise e + finally: + if conn: + conn.close() + + def close_pool(self): + """ + 풀 종료 + """ + if self._pool: + self._pool.close() + self._pool = None diff --git a/apps/pre-processing-service/app/test/test_mariadb_connection.py b/apps/pre-processing-service/app/test/test_mariadb_connection.py new file mode 100644 index 00000000..43902fb4 --- /dev/null +++ b/apps/pre-processing-service/app/test/test_mariadb_connection.py @@ -0,0 +1,148 @@ +import pytest +import threading +from dotenv import load_dotenv + +from app.db.mariadb_manager import MariadbManager + + +class TestMariadbManager: + """ + MariaDB Manager 테스트 + 1. 싱글톤 패턴 확인 + 2. 환경변수 로드 테스트 + 3. 커넥션풀 초기화 테스트 + 4. 커서 컨텍스트 매니저 및 SELECT 1 테스트 + 5. 커넥션 컨텍스트 매니저 및 SELECT 1 테스트 + """ + + def setup_method(self): + """각 테스트 메서드 실행 전 초기화""" + + MariadbManager._instance = None + if hasattr(MariadbManager, '_initialized'): + MariadbManager._initialized = False + + def teardown_method(self): + """각 테스트 메서드 실행 후 정리""" + + if MariadbManager._instance and hasattr(MariadbManager._instance, '_pool'): + if MariadbManager._instance._pool: + MariadbManager._instance.close_pool() + MariadbManager._instance = None + + def test_singleton_pattern(self): + """싱글톤 패턴 확인 테스트""" + + manager1 = MariadbManager() + manager2 = MariadbManager() + + assert manager1 is manager2, "싱글톤 패턴이 제대로 작동하지 않습니다" + assert id(manager1) == id(manager2), "인스턴스 ID가 다릅니다" + + instances = [] + + def create_instance(): + instance = MariadbManager() + instances.append(instance) + + threads = [] + for i in range(5): + thread = threading.Thread(target=create_instance, name=f"Thread-{i}") + threads.append(thread) + thread.start() + + for thread in threads: + thread.join() + + for i, instance in enumerate(instances): + assert instance is manager1, f"스레드 {i}에서 생성된 인스턴스가 다릅니다" + + def test_environment_variables_load(self): + """환경변수 로드 테스트""" + + manager = MariadbManager() + config = manager._config + + required_keys = ['host', 'port', 'database', 'user', 'password'] + for key in required_keys: + assert key in config, f"필수 설정 {key}가 누락되었습니다" + assert config[key] is not None, f"설정 {key}의 값이 None입니다" + if isinstance(config[key], str): + assert config[key].strip() != '', f"설정 {key}의 값이 비어있습니다" + + assert isinstance(config['port'], int), "포트는 정수여야 합니다" + assert config['port'] > 0, "포트는 양수여야 합니다" + + def test_connection_pool_initialization(self): + """커넥션풀 초기화 테스트""" + + manager = MariadbManager() + + assert manager._pool is None, "초기 풀 상태가 None이 아닙니다" + + try: + manager._init_pool(pool_size=5) + except Exception as e: + pytest.fail(f"커넥션풀 초기화 실패: {e}") + + assert manager._pool is not None, "풀이 생성되지 않았습니다" + + try: + conn = manager._pool.connection() + conn.close() + except Exception as e: + pytest.fail(f"풀에서 연결 획득 실패: {e}") + + def test_cursor_context_manager_with_select1(self): + """커서 컨텍스트 매니저 및 SELECT 1 테스트""" + + manager = MariadbManager() + + try: + with manager.get_cursor() as cursor: + cursor.execute("SELECT 1") + result = cursor.fetchone() + + assert result is not None, 
"SELECT 1 결과가 None입니다" + assert result[0] == 1, f"SELECT 1 결과가 1이 아닙니다: {result[0]}" + + cursor.execute("SELECT NOW()") + time_result = cursor.fetchone() + assert time_result is not None, "NOW() 결과가 None입니다" + + cursor.execute("SELECT VERSION()") + version_result = cursor.fetchone() + assert version_result is not None, "VERSION() 결과가 None입니다" + + except Exception as e: + pytest.fail(f"커서 컨텍스트 매니저 테스트 실패: {e}") + + def test_connection_context_manager_with_select1(self): + """커넥션 컨텍스트 매니저 및 SELECT 1 테스트""" + + manager = MariadbManager() + + try: + with manager.get_connection() as conn: + cursor = conn.cursor() + + try: + cursor.execute("SELECT 1") + result = cursor.fetchone() + + assert result is not None, "SELECT 1 결과가 None입니다" + assert result[0] == 1, f"SELECT 1 결과가 1이 아닙니다: {result[0]}" + + cursor.execute("SELECT CONNECTION_ID()") + conn_info = cursor.fetchone() + assert conn_info is not None, "CONNECTION_ID() 결과가 None입니다" + + cursor.execute("SELECT USER()") + user_info = cursor.fetchone() + assert user_info is not None, "USER() 결과가 None입니다" + + finally: + cursor.close() + + except Exception as e: + pytest.fail(f"커넥션 컨텍스트 매니저 테스트 실패: {e}") diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 70da2b53..09cb01d5 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] name = "annotated-types" @@ -168,84 +168,101 @@ files = [ [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] markers = "os_name == \"nt\" and implementation_name != \"pypy\"" files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = 
"sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = 
"sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = 
"sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = 
"sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, ] [package.dependencies] -pycparser = "*" +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} [[package]] name = "charset-normalizer" @@ -364,6 +381,23 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "dbutils" +version = "3.1.2" +description = "Database connections for multi-threaded environments." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "dbutils-3.1.2-py3-none-any.whl", hash = "sha256:0cb388a89eeecf04089aef113a7007c3fac9199e9580c8549829f954870c403a"}, + {file = "dbutils-3.1.2.tar.gz", hash = "sha256:160b5788154f1adeddc61080daff1530b4df2ba0d45af1c3bfbac76db24186b3"}, +] + +[package.extras] +docs = ["docutils"] +pg = ["PyGreSQL (>=5)"] +tests = ["pytest (>=7)", "ruff"] + [[package]] name = "dotenv" version = "0.9.9" @@ -690,36 +724,6 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] -[[package]] -name = "jinja2" -version = "3.1.6" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, - {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.5.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241"}, - {file = "joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55"}, -] - [[package]] name = "loguru" version = "0.7.3" @@ -739,77 +743,6 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] -[[package]] -name = "markupsafe" -version = "3.0.2" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, -] - [[package]] name = "mecab-python3" version = "1.0.10" @@ -863,45 +796,6 @@ files = [ unidic = ["unidic"] unidic-lite = ["unidic-lite"] -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "networkx" -version = "3.5" -description = "Python package for creating and manipulating graphs and networks" -optional = false 
-python-versions = ">=3.11" -groups = ["main"] -files = [ - {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, - {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, -] - -[package.extras] -default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] -developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] -extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] -test-extras = ["pytest-mpl", "pytest-randomly"] - [[package]] name = "numpy" version = "2.3.2" @@ -986,214 +880,6 @@ files = [ {file = "numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48"}, ] -[[package]] -name = "nvidia-cublas-cu12" -version = "12.8.4.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0"}, - {file = "nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142"}, - {file = "nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.8.90" -description = "CUDA profiling tools runtime libs." 
-optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4412396548808ddfed3f17a467b104ba7751e6b58678a4b840675c56d21cf7ed"}, - {file = "nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182"}, - {file = "nvidia_cuda_cupti_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:bb479dcdf7e6d4f8b0b01b115260399bf34154a1a2e9fe11c85c517d87efd98e"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.8.93" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994"}, - {file = "nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc1fec1e1637854b4c0a65fb9a8346b51dd9ee69e61ebaccc82058441f15bce8"}, - {file = "nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:7a4b6b2904850fe78e0bd179c4b655c404d4bb799ef03ddc60804247099ae909"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.8.90" -description = "CUDA Runtime native Libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d"}, - {file = "nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90"}, - {file = "nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "9.10.2.21" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8"}, - {file = "nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8"}, - {file = "nvidia_cudnn_cu12-9.10.2.21-py3-none-win_amd64.whl", hash = "sha256:c6288de7d63e6cf62988f0923f96dc339cea362decb1bf5b3141883392a7d65e"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.3.3.83" -description = "CUFFT native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a"}, - {file = 
"nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74"}, - {file = "nvidia_cufft_cu12-11.3.3.83-py3-none-win_amd64.whl", hash = "sha256:7a64a98ef2a7c47f905aaf8931b69a3a43f27c55530c698bb2ed7c75c0b42cb7"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cufile-cu12" -version = "1.13.1.3" -description = "cuFile GPUDirect libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc"}, - {file = "nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:4beb6d4cce47c1a0f1013d72e02b0994730359e17801d395bdcbf20cfb3bb00a"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.9.90" -description = "CURAND native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:dfab99248034673b779bc6decafdc3404a8a6f502462201f2f31f11354204acd"}, - {file = "nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9"}, - {file = "nvidia_curand_cu12-10.3.9.90-py3-none-win_amd64.whl", hash = "sha256:f149a8ca457277da854f89cf282d6ef43176861926c7ac85b2a0fbd237c587ec"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.7.3.90" -description = "CUDA solver native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0"}, - {file = "nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450"}, - {file = "nvidia_cusolver_cu12-11.7.3.90-py3-none-win_amd64.whl", hash = "sha256:4a550db115fcabc4d495eb7d39ac8b58d4ab5d8e63274d3754df1c0ad6a22d34"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" -nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.5.8.93" -description = "CUSPARSE native runtime libraries" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc"}, - {file = "nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b"}, - {file = "nvidia_cusparse_cu12-12.5.8.93-py3-none-win_amd64.whl", hash = "sha256:9a33604331cb2cac199f2e7f5104dfbb8a5a898c367a53dfda9ff2acb6b6b4dd"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparselt-cu12" -version = "0.7.1" -description = "NVIDIA cuSPARSELt" -optional = false 
-python-versions = "*" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8878dce784d0fac90131b6817b607e803c36e629ba34dc5b433471382196b6a5"}, - {file = "nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623"}, - {file = "nvidia_cusparselt_cu12-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f67fbb5831940ec829c9117b7f33807db9f9678dc2a617fbe781cac17b4e1075"}, -] - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.27.3" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ddf1a245abc36c550870f26d537a9b6087fb2e2e3d6e0ef03374c6fd19d984f"}, - {file = "nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adf27ccf4238253e0b826bce3ff5fa532d65fc42322c8bfdfaf28024c0fbe039"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.8.93" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88"}, - {file = "nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:adccd7161ace7261e01bb91e44e88da350895c270d23f744f0820c818b7229e7"}, - {file = "nvidia_nvjitlink_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:bd93fbeeee850917903583587f4fc3a4eafa022e34572251368238ab5e6bd67f"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.8.90" -description = "NVIDIA Tools Extension" -optional = false -python-versions = ">=3" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d7ad891da111ebafbf7e015d34879f7112832fc239ff0d7d776b6cb685274615"}, - {file = "nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f"}, - {file = "nvidia_nvtx_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:619c8304aedc69f02ea82dd244541a83c3d9d40993381b3b590f1adaed3db41e"}, -] - [[package]] name = "outcome" version = "1.3.0.post0" @@ -1237,6 +923,18 @@ files = [ dev = ["pre-commit", "tox"] testing = ["coverage", "pytest", "pytest-benchmark"] +[[package]] +name = "poetry-core" +version = "2.1.3" +description = "Poetry PEP 517 Build Backend" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "poetry_core-2.1.3-py3-none-any.whl", hash = "sha256:2c704f05016698a54ca1d327f46ce2426d72eaca6ff614132c8477c292266771"}, + {file = "poetry_core-2.1.3.tar.gz", hash = "sha256:0522a015477ed622c89aad56a477a57813cace0c8e7ff2a2906b7ef4a2e296a4"}, +] + [[package]] name = "psycopg2-binary" version = "2.9.10" @@ -1317,15 +1015,15 @@ files = [ [[package]] name = "pycparser" -version = "2.22" +version = "2.23" description = "C 
parser in Python" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "os_name == \"nt\" and implementation_name != \"pypy\"" +markers = "os_name == \"nt\" and implementation_name != \"pypy\" and implementation_name != \"PyPy\"" files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] [[package]] @@ -1543,14 +1241,14 @@ files = [ [[package]] name = "pytest" -version = "8.4.1" +version = "8.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, - {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, ] [package.dependencies] @@ -1799,130 +1497,6 @@ testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2 testingfree = ["huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] torch = ["safetensors[numpy]", "torch (>=1.10)"] -[[package]] -name = "scikit-learn" -version = "1.7.1" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.10" -groups = ["main"] -files = [ - {file = "scikit_learn-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:406204dd4004f0517f0b23cf4b28c6245cbd51ab1b6b78153bc784def214946d"}, - {file = "scikit_learn-1.7.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:16af2e44164f05d04337fd1fc3ae7c4ea61fd9b0d527e22665346336920fe0e1"}, - {file = "scikit_learn-1.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2f2e78e56a40c7587dea9a28dc4a49500fa2ead366869418c66f0fd75b80885c"}, - {file = "scikit_learn-1.7.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b62b76ad408a821475b43b7bb90a9b1c9a4d8d125d505c2df0539f06d6e631b1"}, - {file = "scikit_learn-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:9963b065677a4ce295e8ccdee80a1dd62b37249e667095039adcd5bce6e90deb"}, - {file = "scikit_learn-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90c8494ea23e24c0fb371afc474618c1019dc152ce4a10e4607e62196113851b"}, - {file = "scikit_learn-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:bb870c0daf3bf3be145ec51df8ac84720d9972170786601039f024bf6d61a518"}, - {file = "scikit_learn-1.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40daccd1b5623f39e8943ab39735cadf0bdce80e67cdca2adcb5426e987320a8"}, - {file = "scikit_learn-1.7.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:30d1f413cfc0aa5a99132a554f1d80517563c34a9d3e7c118fde2d273c6fe0f7"}, - {file = "scikit_learn-1.7.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:c711d652829a1805a95d7fe96654604a8f16eab5a9e9ad87b3e60173415cb650"}, - {file = "scikit_learn-1.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3cee419b49b5bbae8796ecd690f97aa412ef1674410c23fc3257c6b8b85b8087"}, - {file = "scikit_learn-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2fd8b8d35817b0d9ebf0b576f7d5ffbbabdb55536b0655a8aaae629d7ffd2e1f"}, - {file = "scikit_learn-1.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:588410fa19a96a69763202f1d6b7b91d5d7a5d73be36e189bc6396bfb355bd87"}, - {file = "scikit_learn-1.7.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3142f0abe1ad1d1c31a2ae987621e41f6b578144a911ff4ac94781a583adad7"}, - {file = "scikit_learn-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3ddd9092c1bd469acab337d87930067c87eac6bd544f8d5027430983f1e1ae88"}, - {file = "scikit_learn-1.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b7839687fa46d02e01035ad775982f2470be2668e13ddd151f0f55a5bf123bae"}, - {file = "scikit_learn-1.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a10f276639195a96c86aa572ee0698ad64ee939a7b042060b98bd1930c261d10"}, - {file = "scikit_learn-1.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:13679981fdaebc10cc4c13c43344416a86fcbc61449cb3e6517e1df9d12c8309"}, - {file = "scikit_learn-1.7.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f1262883c6a63f067a980a8cdd2d2e7f2513dddcef6a9eaada6416a7a7cbe43"}, - {file = "scikit_learn-1.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca6d31fb10e04d50bfd2b50d66744729dbb512d4efd0223b864e2fdbfc4cee11"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:781674d096303cfe3d351ae6963ff7c958db61cde3421cd490e3a5a58f2a94ae"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:10679f7f125fe7ecd5fad37dd1aa2daae7e3ad8df7f3eefa08901b8254b3e12c"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f812729e38c8cb37f760dce71a9b83ccfb04f59b3dca7c6079dcdc60544fa9e"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:88e1a20131cf741b84b89567e1717f27a2ced228e0f29103426102bc2e3b8ef7"}, - {file = "scikit_learn-1.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b1bd1d919210b6a10b7554b717c9000b5485aa95a1d0f177ae0d7ee8ec750da5"}, - {file = "scikit_learn-1.7.1.tar.gz", hash = "sha256:24b3f1e976a4665aa74ee0fcaac2b8fccc6ae77c8e07ab25da3ba6d3292b9802"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.22.0" -scipy = ">=1.8.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.5.0)", "memory_profiler (>=0.57.0)", "pandas (>=1.4.0)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.17.1)", "numpy (>=1.22.0)", "scipy (>=1.8.0)"] -docs = ["Pillow (>=8.4.0)", "matplotlib (>=3.5.0)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.4.0)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.19.0)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.17.1)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)", "towncrier (>=24.8.0)"] -examples = ["matplotlib (>=3.5.0)", "pandas (>=1.4.0)", "plotly (>=5.14.0)", "pooch 
(>=1.6.0)", "scikit-image (>=0.19.0)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.22.0)", "scipy (>=1.8.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==3.0.1)"] -tests = ["matplotlib (>=3.5.0)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas (>=1.4.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.2.1)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.11.7)", "scikit-image (>=0.19.0)"] - -[[package]] -name = "scipy" -version = "1.16.1" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.11" -groups = ["main"] -files = [ - {file = "scipy-1.16.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c033fa32bab91dc98ca59d0cf23bb876454e2bb02cbe592d5023138778f70030"}, - {file = "scipy-1.16.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6e5c2f74e5df33479b5cd4e97a9104c511518fbd979aa9b8f6aec18b2e9ecae7"}, - {file = "scipy-1.16.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0a55ffe0ba0f59666e90951971a884d1ff6f4ec3275a48f472cfb64175570f77"}, - {file = "scipy-1.16.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f8a5d6cd147acecc2603fbd382fed6c46f474cccfcf69ea32582e033fb54dcfe"}, - {file = "scipy-1.16.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb18899127278058bcc09e7b9966d41a5a43740b5bb8dcba401bd983f82e885b"}, - {file = "scipy-1.16.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adccd93a2fa937a27aae826d33e3bfa5edf9aa672376a4852d23a7cd67a2e5b7"}, - {file = "scipy-1.16.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:18aca1646a29ee9a0625a1be5637fa798d4d81fdf426481f06d69af828f16958"}, - {file = "scipy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d85495cef541729a70cdddbbf3e6b903421bc1af3e8e3a9a72a06751f33b7c39"}, - {file = "scipy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:226652fca853008119c03a8ce71ffe1b3f6d2844cc1686e8f9806edafae68596"}, - {file = "scipy-1.16.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81b433bbeaf35728dad619afc002db9b189e45eebe2cd676effe1fb93fef2b9c"}, - {file = "scipy-1.16.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:886cc81fdb4c6903a3bb0464047c25a6d1016fef77bb97949817d0c0d79f9e04"}, - {file = "scipy-1.16.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:15240c3aac087a522b4eaedb09f0ad061753c5eebf1ea430859e5bf8640d5919"}, - {file = "scipy-1.16.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:65f81a25805f3659b48126b5053d9e823d3215e4a63730b5e1671852a1705921"}, - {file = "scipy-1.16.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6c62eea7f607f122069b9bad3f99489ddca1a5173bef8a0c75555d7488b6f725"}, - {file = "scipy-1.16.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f965bbf3235b01c776115ab18f092a95aa74c271a52577bcb0563e85738fd618"}, - {file = "scipy-1.16.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f006e323874ffd0b0b816d8c6a8e7f9a73d55ab3b8c3f72b752b226d0e3ac83d"}, - {file = "scipy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8fd15fc5085ab4cca74cb91fe0a4263b1f32e4420761ddae531ad60934c2119"}, - {file = "scipy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:f7b8013c6c066609577d910d1a2a077021727af07b6fab0ee22c2f901f22352a"}, - {file = "scipy-1.16.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5451606823a5e73dfa621a89948096c6528e2896e40b39248295d3a0138d594f"}, - {file = "scipy-1.16.1-cp313-cp313-macosx_12_0_arm64.whl", 
hash = "sha256:89728678c5ca5abd610aee148c199ac1afb16e19844401ca97d43dc548a354eb"}, - {file = "scipy-1.16.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e756d688cb03fd07de0fffad475649b03cb89bee696c98ce508b17c11a03f95c"}, - {file = "scipy-1.16.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5aa2687b9935da3ed89c5dbed5234576589dd28d0bf7cd237501ccfbdf1ad608"}, - {file = "scipy-1.16.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0851f6a1e537fe9399f35986897e395a1aa61c574b178c0d456be5b1a0f5ca1f"}, - {file = "scipy-1.16.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fedc2cbd1baed37474b1924c331b97bdff611d762c196fac1a9b71e67b813b1b"}, - {file = "scipy-1.16.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2ef500e72f9623a6735769e4b93e9dcb158d40752cdbb077f305487e3e2d1f45"}, - {file = "scipy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:978d8311674b05a8f7ff2ea6c6bce5d8b45a0cb09d4c5793e0318f448613ea65"}, - {file = "scipy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:81929ed0fa7a5713fcdd8b2e6f73697d3b4c4816d090dd34ff937c20fa90e8ab"}, - {file = "scipy-1.16.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:bcc12db731858abda693cecdb3bdc9e6d4bd200213f49d224fe22df82687bdd6"}, - {file = "scipy-1.16.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:744d977daa4becb9fc59135e75c069f8d301a87d64f88f1e602a9ecf51e77b27"}, - {file = "scipy-1.16.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:dc54f76ac18073bcecffb98d93f03ed6b81a92ef91b5d3b135dcc81d55a724c7"}, - {file = "scipy-1.16.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:367d567ee9fc1e9e2047d31f39d9d6a7a04e0710c86e701e053f237d14a9b4f6"}, - {file = "scipy-1.16.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4cf5785e44e19dcd32a0e4807555e1e9a9b8d475c6afff3d21c3c543a6aa84f4"}, - {file = "scipy-1.16.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3d0b80fb26d3e13a794c71d4b837e2a589d839fd574a6bbb4ee1288c213ad4a3"}, - {file = "scipy-1.16.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8503517c44c18d1030d666cb70aaac1cc8913608816e06742498833b128488b7"}, - {file = "scipy-1.16.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:30cc4bb81c41831ecfd6dc450baf48ffd80ef5aed0f5cf3ea775740e80f16ecc"}, - {file = "scipy-1.16.1-cp313-cp313t-win_amd64.whl", hash = "sha256:c24fa02f7ed23ae514460a22c57eca8f530dbfa50b1cfdbf4f37c05b5309cc39"}, - {file = "scipy-1.16.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:796a5a9ad36fa3a782375db8f4241ab02a091308eb079746bc0f874c9b998318"}, - {file = "scipy-1.16.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:3ea0733a2ff73fd6fdc5fecca54ee9b459f4d74f00b99aced7d9a3adb43fb1cc"}, - {file = "scipy-1.16.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:85764fb15a2ad994e708258bb4ed8290d1305c62a4e1ef07c414356a24fcfbf8"}, - {file = "scipy-1.16.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:ca66d980469cb623b1759bdd6e9fd97d4e33a9fad5b33771ced24d0cb24df67e"}, - {file = "scipy-1.16.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e7cc1ffcc230f568549fc56670bcf3df1884c30bd652c5da8138199c8c76dae0"}, - {file = "scipy-1.16.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ddfb1e8d0b540cb4ee9c53fc3dea3186f97711248fb94b4142a1b27178d8b4b"}, - {file = "scipy-1.16.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4dc0e7be79e95d8ba3435d193e0d8ce372f47f774cffd882f88ea4e1e1ddc731"}, - {file 
= "scipy-1.16.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f23634f9e5adb51b2a77766dac217063e764337fbc816aa8ad9aaebcd4397fd3"}, - {file = "scipy-1.16.1-cp314-cp314-win_amd64.whl", hash = "sha256:57d75524cb1c5a374958a2eae3d84e1929bb971204cc9d52213fb8589183fc19"}, - {file = "scipy-1.16.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:d8da7c3dd67bcd93f15618938f43ed0995982eb38973023d46d4646c4283ad65"}, - {file = "scipy-1.16.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:cc1d2f2fd48ba1e0620554fe5bc44d3e8f5d4185c8c109c7fbdf5af2792cfad2"}, - {file = "scipy-1.16.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:21a611ced9275cb861bacadbada0b8c0623bc00b05b09eb97f23b370fc2ae56d"}, - {file = "scipy-1.16.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dfbb25dffc4c3dd9371d8ab456ca81beeaf6f9e1c2119f179392f0dc1ab7695"}, - {file = "scipy-1.16.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f0ebb7204f063fad87fc0a0e4ff4a2ff40b2a226e4ba1b7e34bf4b79bf97cd86"}, - {file = "scipy-1.16.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f1b9e5962656f2734c2b285a8745358ecb4e4efbadd00208c80a389227ec61ff"}, - {file = "scipy-1.16.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e1a106f8c023d57a2a903e771228bf5c5b27b5d692088f457acacd3b54511e4"}, - {file = "scipy-1.16.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:709559a1db68a9abc3b2c8672c4badf1614f3b440b3ab326d86a5c0491eafae3"}, - {file = "scipy-1.16.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c0c804d60492a0aad7f5b2bb1862f4548b990049e27e828391ff2bf6f7199998"}, - {file = "scipy-1.16.1.tar.gz", hash = "sha256:44c76f9e8b6e8e488a586190ab38016e4ed2f8a038af7cd3defa903c0a2238b3"}, -] - -[package.dependencies] -numpy = ">=1.25.2,<2.6" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - [[package]] name = "selenium" version = "4.35.0" @@ -1943,28 +1517,6 @@ typing_extensions = ">=4.14.0,<4.15.0" urllib3 = {version = ">=2.5.0,<3.0", extras = ["socks"]} websocket-client = ">=1.8.0,<1.9.0" -[[package]] -name = "setuptools" -version = "80.9.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" or python_version >= \"3.12\"" -files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", 
"jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] - [[package]] name = "sniffio" version = "1.3.1" @@ -2099,14 +1651,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.47.2" +version = "0.47.3" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b"}, - {file = "starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8"}, + {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"}, + {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"}, ] [package.dependencies] @@ -2116,36 +1668,6 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\"" [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] -[[package]] -name = "sympy" -version = "1.14.0" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, - {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "threadpoolctl" -version = "3.6.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"}, - {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, -] - [[package]] name = "tokenizers" version = "0.22.0" @@ -2179,69 +1701,6 @@ dev = ["tokenizers[testing]"] docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", 
"requests", "ruff"] -[[package]] -name = "torch" -version = "2.8.0" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.9.0" -groups = ["main"] -files = [ - {file = "torch-2.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0be92c08b44009d4131d1ff7a8060d10bafdb7ddcb7359ef8d8c5169007ea905"}, - {file = "torch-2.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89aa9ee820bb39d4d72b794345cccef106b574508dd17dbec457949678c76011"}, - {file = "torch-2.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8e5bf982e87e2b59d932769938b698858c64cc53753894be25629bdf5cf2f46"}, - {file = "torch-2.8.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a3f16a58a9a800f589b26d47ee15aca3acf065546137fc2af039876135f4c760"}, - {file = "torch-2.8.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:220a06fd7af8b653c35d359dfe1aaf32f65aa85befa342629f716acb134b9710"}, - {file = "torch-2.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c12fa219f51a933d5f80eeb3a7a5d0cbe9168c0a14bbb4055f1979431660879b"}, - {file = "torch-2.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c7ef765e27551b2fbfc0f41bcf270e1292d9bf79f8e0724848b1682be6e80aa"}, - {file = "torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:5ae0524688fb6707c57a530c2325e13bb0090b745ba7b4a2cd6a3ce262572916"}, - {file = "torch-2.8.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e2fab4153768d433f8ed9279c8133a114a034a61e77a3a104dcdf54388838705"}, - {file = "torch-2.8.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2aca0939fb7e4d842561febbd4ffda67a8e958ff725c1c27e244e85e982173c"}, - {file = "torch-2.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:2f4ac52f0130275d7517b03a33d2493bab3693c83dcfadf4f81688ea82147d2e"}, - {file = "torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:619c2869db3ada2c0105487ba21b5008defcc472d23f8b80ed91ac4a380283b0"}, - {file = "torch-2.8.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2b2f96814e0345f5a5aed9bf9734efa913678ed19caf6dc2cddb7930672d6128"}, - {file = "torch-2.8.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:65616ca8ec6f43245e1f5f296603e33923f4c30f93d65e103d9e50c25b35150b"}, - {file = "torch-2.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:659df54119ae03e83a800addc125856effda88b016dfc54d9f65215c3975be16"}, - {file = "torch-2.8.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:1a62a1ec4b0498930e2543535cf70b1bef8c777713de7ceb84cd79115f553767"}, - {file = "torch-2.8.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:83c13411a26fac3d101fe8035a6b0476ae606deb8688e904e796a3534c197def"}, - {file = "torch-2.8.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8f0a9d617a66509ded240add3754e462430a6c1fc5589f86c17b433dd808f97a"}, - {file = "torch-2.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a7242b86f42be98ac674b88a4988643b9bc6145437ec8f048fea23f72feb5eca"}, - {file = "torch-2.8.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:7b677e17f5a3e69fdef7eb3b9da72622f8d322692930297e4ccb52fefc6c8211"}, - {file = "torch-2.8.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:da6afa31c13b669d4ba49d8a2169f0db2c3ec6bec4af898aa714f401d4c38904"}, - {file = "torch-2.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:06fcee8000e5c62a9f3e52a688b9c5abb7c6228d0e56e3452983416025c41381"}, - {file = "torch-2.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:5128fe752a355d9308e56af1ad28b15266fe2da5948660fad44de9e3a9e36e8c"}, - {file = 
"torch-2.8.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e9f071f5b52a9f6970dc8a919694b27a91ae9dc08898b2b988abbef5eddfd1ae"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -nvidia-cublas-cu12 = {version = "12.8.4.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.8.93", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "9.10.2.21", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.3.3.83", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufile-cu12 = {version = "1.13.1.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.9.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.7.3.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.5.8.93", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparselt-cu12 = {version = "0.7.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.27.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvjitlink-cu12 = {version = "12.8.93", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -setuptools = {version = "*", markers = "python_version >= \"3.12\""} -sympy = ">=1.13.3" -triton = {version = "3.4.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -typing-extensions = ">=4.10.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.13.0)"] -pyyaml = ["pyyaml"] - [[package]] name = "tqdm" version = "4.67.1" @@ -2266,14 +1725,14 @@ telegram = ["requests"] [[package]] name = "transformers" -version = "4.56.0" +version = "4.56.1" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.9.0" groups = ["main"] files = [ - {file = "transformers-4.56.0-py3-none-any.whl", hash = "sha256:bacf539c38dd850690856881c4974321af93a22f2ee96bcc994741a2121d8e71"}, - {file = "transformers-4.56.0.tar.gz", hash = "sha256:6ca9c3f38aa4da93ebf877db7156368c1c188c7465f09dbe70951e7622e987fa"}, + {file = "transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248"}, + {file = "transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74"}, ] [package.dependencies] @@ -2290,23 +1749,23 @@ tqdm = ">=4.27" [package.extras] accelerate = ["accelerate (>=0.26.0)"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av", "codecarbon (>=2.8.1)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "librosa", "mistral-common[opencv] (>=1.6.3)", 
"num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision"] +all = ["Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "codecarbon (>=2.8.1)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "librosa", "mistral-common[opencv] (>=1.6.3)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] benchmark = ["optimum-benchmark (>=0.3.0)"] chat-template = ["jinja2 (>=3.1.0)"] codecarbon = ["codecarbon (>=2.8.1)"] deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "optuna", "parameterized (>=0.9)", "protobuf", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow 
(>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.22.0,<=0.23.0)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "kenlm", "kernels (>=0.6.1,<=0.9)", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "optuna", "parameterized (>=0.9)", "protobuf", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", 
"mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "onnxconverter-common", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "tf2onnx", "timeout-decorator", "tokenizers (>=0.22.0,<=0.23.0)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "kenlm", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic 
(>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] -hf-xet = ["hf-xet"] +hf-xet = ["hf_xet"] hub-kernels = ["kernels (>=0.6.1,<=0.9)"] integrations = ["kernels (>=0.6.1,<=0.9)", "optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)"] mistral-common = ["mistral-common[opencv] (>=1.6.3)"] modelcreation = ["cookiecutter (==1.7.3)"] natten = ["natten (>=0.14.6,<0.15.0)"] @@ -2325,7 +1784,7 @@ serving = ["accelerate (>=0.26.0)", "fastapi", "openai (>=1.98.0)", "pydantic (> sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "parameterized (>=0.9)", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "parameterized (>=0.9)", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] @@ -2335,7 +1794,7 @@ tokenizers = ["tokenizers (>=0.22.0,<=0.23.0)"] torch = ["accelerate (>=0.26.0)", "torch (>=2.2)"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.34.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "tqdm (>=4.27)"] +torchhub = ["filelock", "huggingface-hub (>=0.34.0,<1.0)", "importlib_metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers 
(>=0.22.0,<=0.23.0)", "torch (>=2.2)", "tqdm (>=4.27)"] video = ["av"] vision = ["Pillow (>=10.0.1,<=15.0)"] @@ -2376,31 +1835,6 @@ outcome = ">=1.2.0" trio = ">=0.11" wsproto = ">=0.14" -[[package]] -name = "triton" -version = "3.4.0" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "<3.14,>=3.9" -groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\"" -files = [ - {file = "triton-3.4.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ff2785de9bc02f500e085420273bb5cc9c9bb767584a4aa28d6e360cec70128"}, - {file = "triton-3.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b70f5e6a41e52e48cfc087436c8a28c17ff98db369447bcaff3b887a3ab4467"}, - {file = "triton-3.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c1d84a5c0ec2c0f8e8a072d7fd150cab84a9c239eaddc6706c081bfae4eb04"}, - {file = "triton-3.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00be2964616f4c619193cb0d1b29a99bd4b001d7dc333816073f92cf2a8ccdeb"}, - {file = "triton-3.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7936b18a3499ed62059414d7df563e6c163c5e16c3773678a3ee3d417865035d"}, - {file = "triton-3.4.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e5c1442eaeabae2e2452ae765801bd53cd4ce873cab0d1bdd59a32ab2d9397"}, -] - -[package.dependencies] -setuptools = ">=40.8.0" - -[package.extras] -build = ["cmake (>=3.20,<4.0)", "lit"] -tests = ["autopep8", "isort", "llnl-hatchet", "numpy", "pytest", "pytest-forked", "pytest-xdist", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] - [[package]] name = "typing-extensions" version = "4.14.1" @@ -2519,4 +1953,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "72004c91cb88e0c411cff8447951d5c36a0a44c0b8cf3937a24860c10700251f" +content-hash = "1d22766abbf718055b5ad2442ed8a1ad90732746d77df7dc19194a0ca3b219ba" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index f3b4d06c..bad7f3bc 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -5,7 +5,7 @@ description = "" authors = [ {name = "skip"} ] -readme = "README.md" + requires-python = ">=3.11,<3.14" dependencies = [ "fastapi (>=0.116.1,<0.117.0)", @@ -22,16 +22,14 @@ dependencies = [ "selenium (>=4.35.0,<5.0.0)", "transformers (>=4.56.0,<5.0.0)", "numpy (>=2.3.2,<3.0.0)", - "torch (>=2.8.0,<3.0.0)", - "scikit-learn (>=1.7.1,<2.0.0)", "python-dotenv (>=1.1.1,<2.0.0)", "mecab-python3 (>=1.0.10,<2.0.0)", "httpx (>=0.28.1,<0.29.0)", - "asyncpg (>=0.30.0,<0.31.0)", - "gunicorn (>=23.0.0,<24.0.0)", "pyperclip (>=1.9.0,<2.0.0)", "pymysql (>=1.1.2,<2.0.0)", "sqlalchemy (>=2.0.43,<3.0.0)", + "poetry-core (>=2.1.3,<3.0.0)", + "dbutils (>=3.1.2,<4.0.0)" ] From 51e6ec717f8914a87e3037f71816975cb434cfab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EA=B2=BD=EB=AF=BC?= <153978154+kakusiA@users.noreply.github.com> Date: Wed, 10 Sep 2025 17:06:17 +0900 Subject: [PATCH 22/31] chore: Python CI setting (#65) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: python CI 추가 * chore: 코드 테스트 * chore: poetry 의존성 추가 * refactor:코드 포멧팅 * chore: ci ruff제거 * chore: - config.py 의존성 삭제 - ci 테스트 env의존성 추가 * chore: - code formatting * refactor:테스트코드 url 버그 수정 * 
chore:docker image add * chore:poetry 필요없는 라이브러리 삭제 refactor: CI 코드 리펙토링 * chore:ruff 주석 처리 * chore:패키지 다시 추가 * chore: python CI 코드 리펙토링 * chore: 코드변경 * chore: CI 파이썬 코드 수정 * chore:CI 코드 롤백 * chore: 도커 파일 수정 * chore: poetry dependencies 변경 * chore:dockerfile 수정 * chore: poetry torch 변경 * chore: black refactor * chore: pyhton CI 설정 완료 - python CI 개발 - dockerFile 수정 -docker-compose 파일 수정 * chore:코드 포멧팅 * chore:poetry패키지 변경 --- .github/workflows/ci-python.yml | 147 +++ apps/pre-processing-service/.dockerignore | 20 + apps/pre-processing-service/Dockerfile | 33 +- .../app/api/endpoints/blog.py | 53 +- .../app/api/endpoints/keywords.py | 10 +- .../app/api/endpoints/product.py | 18 +- .../app/api/endpoints/test.py | 70 +- apps/pre-processing-service/app/api/router.py | 11 +- .../pre-processing-service/app/core/config.py | 37 +- .../app/db/AsyncPostgreSQLManager.py | 17 +- .../app/db/PostgreSQLManager.py | 14 +- .../app/db/db_connecter.py | 2 +- .../app/db/mariadb_manager.py | 24 +- .../app/decorators/logging.py | 31 +- .../app/errors/BlogPostingException.py | 38 +- .../app/errors/CrawlingException.py | 22 +- .../app/errors/CustomException.py | 15 +- .../app/errors/handlers.py | 15 +- apps/pre-processing-service/app/main.py | 6 +- .../middleware/BackServiceLoggerDependency.py | 2 +- .../app/middleware/ServiceLoggerMiddleware.py | 93 +- .../app/middleware/logging.py | 18 +- .../app/model/schemas.py | 127 ++- .../service/blog/base_blog_post_service.py | 10 +- .../service/blog/blogger_blog_post_service.py | 50 +- .../service/blog/naver_blog_post_service.py | 63 +- .../service/blog/tistory_blog_post_service.py | 53 +- .../app/service/crawl_service.py | 25 +- .../app/service/keyword_service.py | 32 +- .../app/service/match_service.py | 46 +- .../app/service/search_service.py | 53 +- .../app/service/similarity_service.py | 93 +- .../app/test/test_keyword.py | 23 +- .../app/test/test_mariadb_connection.py | 12 +- .../app/test/test_match_service.py | 28 +- .../app/test/test_sadagu_crawl.py | 122 +-- .../app/test/test_search_service.py | 24 +- .../app/test/test_similarity_service.py | 49 +- .../app/utils/crawler_utils.py | 219 ++-- .../app/utils/crawling_util.py | 7 +- .../app/utils/keyword_matcher.py | 95 +- .../app/utils/similarity_analyzer.py | 73 +- apps/pre-processing-service/poetry.lock | 955 ++++++++++++++++-- apps/pre-processing-service/pyproject.toml | 65 +- docker/production/docker-compose.yml | 9 + 45 files changed, 2130 insertions(+), 799 deletions(-) create mode 100644 .github/workflows/ci-python.yml create mode 100644 apps/pre-processing-service/.dockerignore diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml new file mode 100644 index 00000000..5055aea3 --- /dev/null +++ b/.github/workflows/ci-python.yml @@ -0,0 +1,147 @@ +name: CI (Python/FastAPI) + +on: + push: + branches: + - main + paths: + - "apps/pre-processing-service/**" # Python 서비스 경로 + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches: + - main + - develop + - release/** + paths: + - "apps/pre-processing-service/**" # Python 서비스 경로 + +permissions: + contents: read + packages: write + security-events: write + checks: write + pull-requests: write + +jobs: + lint: + if: github.event.pull_request.draft == false + name: Lint & Format Check + runs-on: ubuntu-latest + + defaults: + run: + working-directory: apps/pre-processing-service + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + 
python-version: "3.11" + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v4 + with: + path: apps/pre-processing-service/.venv + key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} + + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + - name: Run Formatter Check (Black) + run: poetry run black --check . + + # - name: Run Linter (Ruff) + # run: poetry run ruff check . + + test: + name: Run Tests + runs-on: ubuntu-latest + needs: lint + + defaults: + run: + working-directory: apps/pre-processing-service + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v4 + with: + path: apps/pre-processing-service/.venv + key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} + + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + - name: Run tests with Pytest + env: + DB_HOST: localhost + DB_PORT: 3306 + DB_USER: test_user + DB_PASS: test_pass + DB_NAME: test_db + ENV_NAME: test + run: poetry run pytest + + build-and-push-docker: + name: Build Docker Image and push to registry + runs-on: ubuntu-latest + if: github.ref == 'refs/heads/feature/python-ci' && github.event_name == 'push' + needs: + - test + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Login to Docker Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set repo lowercase + run: echo "REPO_LC=${GITHUB_REPOSITORY,,}" >> $GITHUB_ENV + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: ./apps/pre-processing-service # Dockerfile이 있는 경로 + push: true + tags: | + ghcr.io/${{ env.REPO_LC }}/pre-processing-service:latest + ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ github.sha }} + + - name: Analyze image layers + run: | + echo "=== Image Layer Analysis ===" + docker history ghcr.io/${{ env.REPO_LC }}/pre-processing-service:latest --human --no-trunc diff --git a/apps/pre-processing-service/.dockerignore b/apps/pre-processing-service/.dockerignore new file mode 100644 index 00000000..51db904a --- /dev/null +++ b/apps/pre-processing-service/.dockerignore @@ -0,0 +1,20 @@ +.git +.gitignore +**/__pycache__/ +**/*.pyc +**/.pytest_cache/ +**/.mypy_cache/ +**/.ruff_cache/ +**/.venv/ +**/node_modules/ +**/dist/ +**/build/ +tests/ +docs/ +scripts/ +.github/ +.env +.env.* +*.log +pytest-report.xml +coverage.xml diff --git a/apps/pre-processing-service/Dockerfile b/apps/pre-processing-service/Dockerfile index 073dea33..69b7cacd 100644 --- a/apps/pre-processing-service/Dockerfile +++ b/apps/pre-processing-service/Dockerfile @@ -1,18 +1,35 @@ +# ---- builder ---- FROM python:3.11-slim AS builder WORKDIR /app + +# 필수 OS 패키지 RUN apt-get update && apt-get install -y --no-install-recommends curl \ && rm -rf 
/var/lib/apt/lists/* + +# Poetry 설치 RUN curl -sSL https://install.python-poetry.org | python3 - ENV PATH="/root/.local/bin:$PATH" -RUN poetry config virtualenvs.create false +RUN poetry self add "poetry-plugin-export>=1.7.0" +# 런타임 가상환경 +RUN python -m venv /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +# 의존성 해결 → requirements로 export → pip로 설치(= 반드시 /opt/venv에 설치됨) COPY pyproject.toml poetry.lock ./ -RUN poetry install --no-root +RUN poetry export --without dev -f requirements.txt -o requirements.txt \ + && pip install --no-cache-dir -r requirements.txt +# ---- runtime ---- FROM python:3.11-slim AS final WORKDIR /app -# site-packages + 콘솔 스크립트(gunicorn/uvicorn) 함께 복사 -COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages -COPY --from=builder /usr/local/bin /usr/local/bin -COPY ./app ./app -EXPOSE 8000 -CMD ["gunicorn", "-w", "2", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000", "app.main:app"] + +# /opt/venv 복사 +COPY --from=builder /opt/venv /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +# 앱 소스 +COPY . . + + +# (권장 대안) 코드에서 uvicorn import 안 하고 프로세스 매니저로 실행하려면: +CMD ["gunicorn", "-k", "uvicorn.workers.UvicornWorker", "app.main:app", "-b", "0.0.0.0:8000"] diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 85c6924c..04ae0b14 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -8,18 +8,29 @@ router = APIRouter() + @router.get("/", summary="블로그 API 상태 확인") async def root(): return {"message": "blog API"} -@router.post("/rag/create", response_model=ResponseBlogCreate, summary="RAG 기반 블로그 콘텐츠 생성") + +@router.post( + "/rag/create", + response_model=ResponseBlogCreate, + summary="RAG 기반 블로그 콘텐츠 생성", +) async def rag_create(request: RequestBlogCreate): """ RAG 기반 블로그 콘텐츠 생성 """ return {"message": "blog API"} -@router.post("/publish", response_model=ResponseBlogPublish, summary="블로그 콘텐츠 배포 (네이버/티스토리/블로거 지원)") + +@router.post( + "/publish", + response_model=ResponseBlogPublish, + summary="블로그 콘텐츠 배포 (네이버/티스토리/블로거 지원)", +) async def publish(request: RequestBlogPublish): """ 생성된 블로그 콘텐츠를 배포합니다. 
@@ -31,17 +42,15 @@ async def publish(request: RequestBlogPublish): result = naver_service.post_content( title=request.post_title, content=request.post_content, - tags=request.post_tags + tags=request.post_tags, ) if not result: - raise CustomException("네이버 블로그 포스팅에 실패했습니다.", status_code=500) + raise CustomException( + "네이버 블로그 포스팅에 실패했습니다.", status_code=500 + ) return ResponseBlogPublish( - job_id= 1, - schedule_id= 1, - schedule_his_id= 1, - status="200", - metadata=result + job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result ) elif request.tag == "tistory": @@ -49,18 +58,16 @@ async def publish(request: RequestBlogPublish): result = tistory_service.post_content( title=request.post_title, content=request.post_content, - tags=request.post_tags + tags=request.post_tags, ) if not result: - raise CustomException("티스토리 블로그 포스팅에 실패했습니다.", status_code=500) + raise CustomException( + "티스토리 블로그 포스팅에 실패했습니다.", status_code=500 + ) return ResponseBlogPublish( - job_id= 1, - schedule_id= 1, - schedule_his_id= 1, - status="200", - metadata=result + job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result ) elif request.tag == "blogger": @@ -68,16 +75,14 @@ async def publish(request: RequestBlogPublish): result = blogger_service.post_content( title=request.post_title, content=request.post_content, - tags=request.post_tags + tags=request.post_tags, ) if not result: - raise CustomException("블로거 블로그 포스팅에 실패했습니다.", status_code=500) + raise CustomException( + "블로거 블로그 포스팅에 실패했습니다.", status_code=500 + ) return ResponseBlogPublish( - job_id= 1, - schedule_id= 1, - schedule_his_id= 1, - status="200", - metadata=result - ) \ No newline at end of file + job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result + ) diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index 22c23fa6..2b407d6d 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -14,7 +14,9 @@ async def root(): return {"message": "keyword API"} -@router.post("/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색") +@router.post( + "/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색" +) async def search(request: RequestNaverSearch): """ 이 엔드포인트는 JSON 요청으로 네이버 키워드 검색을 수행합니다. 
@@ -34,7 +36,11 @@ async def search(request: RequestNaverSearch): return response_data -@router.post("/ssadagu/validate", response_model=ResponseNaverSearch, summary="사다구몰 키워드 검증") +@router.post( + "/ssadagu/validate", + response_model=ResponseNaverSearch, + summary="사다구몰 키워드 검증", +) async def ssadagu_validate(request: RequestNaverSearch): """ 사다구몰 키워드 검증 테스트용 엔드포인트 diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index 023096f8..d3881ed6 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -1,6 +1,10 @@ from fastapi import APIRouter, Request, HTTPException from app.decorators.logging import log_api_call -from ...errors.CustomException import InvalidItemDataException, ItemNotFoundException, CustomException +from ...errors.CustomException import ( + InvalidItemDataException, + ItemNotFoundException, + CustomException, +) from ...service.crawl_service import CrawlService from ...service.search_service import SearchService from ...service.match_service import MatchService @@ -56,7 +60,9 @@ async def match(request: RequestSadaguMatch): raise HTTPException(status_code=500, detail=str(e)) -@router.post("/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석") +@router.post( + "/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석" +) async def similarity(request: RequestSadaguSimilarity): """ 매칭된 상품들 중 키워드와의 유사도를 계산하여 최적의 상품을 선택합니다. @@ -66,7 +72,9 @@ async def similarity(request: RequestSadaguSimilarity): result = similarity_service.select_product_by_similarity(request) if not result: - raise CustomException(500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED") + raise CustomException( + 500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED" + ) return result except InvalidItemDataException as e: @@ -75,7 +83,9 @@ async def similarity(request: RequestSadaguSimilarity): raise HTTPException(status_code=500, detail=str(e)) -@router.post("/crawl", response_model=ResponseSadaguCrawl, summary="상품 상세 정보 크롤링") +@router.post( + "/crawl", response_model=ResponseSadaguCrawl, summary="상품 상세 정보 크롤링" +) async def crawl(request: Request, body: RequestSadaguCrawl): """ 상품 상세 페이지를 크롤링하여 상세 정보를 수집합니다. diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py index 9225c7cd..6ed44d08 100644 --- a/apps/pre-processing-service/app/api/endpoints/test.py +++ b/apps/pre-processing-service/app/api/endpoints/test.py @@ -1,4 +1,5 @@ # app/api/endpoints/embedding.py +import loguru from fastapi import APIRouter from sqlalchemy import text @@ -13,23 +14,25 @@ from ...service.match_service import MatchService from ...service.search_service import SearchService from ...service.similarity_service import SimilarityService -from ...db.db_connecter import engine # ✅ 우리가 만든 DB 유틸 임포트 +from ...db.db_connecter import engine # ✅ 우리가 만든 DB 유틸 임포트 + # 이 파일만의 독립적인 라우터를 생성합니다. 
router = APIRouter() + @router.get("/") async def root(): return {"message": "테스트 API"} -@router.get("/hello/{name}" , tags=["hello"]) +@router.get("/hello/{name}", tags=["hello"]) # @log_api_call async def say_hello(name: str): return {"message": f"Hello {name}"} # 특정 경로에서 의도적으로 에러 발생 -#커스텀에러 테스터 url +# 커스텀에러 테스터 url @router.get("/error/{item_id}") async def trigger_error(item_id: int): if item_id == 0: @@ -41,8 +44,8 @@ async def trigger_error(item_id: int): if item_id == 500: raise ValueError("이것은 테스트용 값 오류입니다.") + return {"result": item_id} - return {"result": item_id}\ @router.get("/db-test", tags=["db"]) async def db_test(): @@ -55,71 +58,68 @@ async def db_test(): except Exception as e: return {"status": "error", "detail": str(e)} + def with_meta(data: Mapping[str, Any], meta: Mapping[str, Any]) -> Dict[str, Any]: """요청 payload + 공통 meta 머지""" return {**meta, **data} -@router.get("/tester",response_model=None) + +@router.get("/tester", response_model=None) async def processing_tester(): meta = { "job_id": 1, "schedule_id": 1, - "schedule_his_id": 1, # ✅ 타이포 수정 + "schedule_his_id": 1, # ✅ 타이포 수정 } - request_dict = { - "tag":"naver", - "category":"50000000", - "start_date":"2025-09-01", - "end_date":"2025-09-02" + request_dict = { + "tag": "naver", + "category": "50000000", + "start_date": "2025-09-01", + "end_date": "2025-09-02", } - #네이버 키워드 검색 - naver_request = RequestNaverSearch(**with_meta(meta,request_dict)) + # 네이버 키워드 검색 + naver_request = RequestNaverSearch(**with_meta(meta, request_dict)) response_data = await keyword_search(naver_request) keyword = response_data.get("keyword") - print(keyword) + loguru.logger.info(keyword) - keyword ={ - "keyword" : keyword, + keyword = { + "keyword": keyword, } - #싸다구 상품 검색 + # 싸다구 상품 검색 sadagu_request = RequestSadaguSearch(**with_meta(meta, keyword)) search_service = SearchService() keyword_result = await search_service.search_products(sadagu_request) - print(keyword_result) + loguru.logger.info(keyword_result) - #싸다구 상품 매치 + # 싸다구 상품 매치 keyword["search_results"] = keyword_result.get("search_results") keyword_match_request = RequestSadaguMatch(**with_meta(meta, keyword)) match_service = MatchService() keyword_match_response = match_service.match_products(keyword_match_request) - print(keyword_match_response) + loguru.logger.info(keyword_match_response) - #싸다구 상품 유사도 분석 + # 싸다구 상품 유사도 분석 keyword["matched_products"] = keyword_match_response.get("matched_products") keyword_similarity_request = RequestSadaguSimilarity(**with_meta(meta, keyword)) similarity_service = SimilarityService() keyword_similarity_response = similarity_service.select_product_by_similarity( keyword_similarity_request ) - print(keyword_similarity_response) - - #싸다구 상품 크롤링 + loguru.logger.info(keyword_similarity_response) + # 싸다구 상품 크롤링 + # 블로그 생성 - #블로그 생성 - - - - #블로그 배포 + # 블로그 배포 tistory_service = TistoryBlogPostService() result = tistory_service.post_content( - title = "안녕하살법", - content = "안녕하살법 받아치기", - tags= ["퉁퉁퉁사후르","짜라짜라"] + title="안녕하살법", + content="안녕하살법 받아치기러기 코드 받아치기", + tags=["퉁퉁퉁사후르", "짜라짜라"], ) - print(result) - + loguru.logger.info(result) - return "구웃" \ No newline at end of file + return "구웃" diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py index dce62c5c..99286cf6 100644 --- a/apps/pre-processing-service/app/api/router.py +++ b/apps/pre-processing-service/app/api/router.py @@ -11,22 +11,21 @@ # processing API URL api_router.include_router(blog.router, prefix="/blogs", tags=["blog"]) -#상품 API 
URL +# 상품 API URL api_router.include_router(product.router, prefix="/products", tags=["product"]) -#모듈 테스터를 위한 endpoint -> 추후 삭제 예정 +# 모듈 테스터를 위한 endpoint -> 추후 삭제 예정 api_router.include_router(test.router, prefix="/tests", tags=["Test"]) + @api_router.get("/ping") async def root(): return {"message": "서버 실행중입니다."} + @api_router.get("/db") def get_settings(): """ 환경 변수가 올바르게 로드되었는지 확인하는 엔드포인트 """ - return { - "환경": settings.env_name, - "데이터베이스 URL": settings.db_url - } + return {"환경": settings.env_name, "데이터베이스 URL": settings.db_url} diff --git a/apps/pre-processing-service/app/core/config.py b/apps/pre-processing-service/app/core/config.py index aab10515..ed54cc69 100644 --- a/apps/pre-processing-service/app/core/config.py +++ b/apps/pre-processing-service/app/core/config.py @@ -11,14 +11,19 @@ def detect_mecab_dicdir() -> Optional[str]: # 1. mecab-config 명령어로 사전 경로 확인 (가장 정확한 방법) try: - result = subprocess.run(['mecab-config', '--dicdir'], - capture_output=True, text=True, timeout=5) + result = subprocess.run( + ["mecab-config", "--dicdir"], capture_output=True, text=True, timeout=5 + ) if result.returncode == 0: dicdir = result.stdout.strip() if os.path.exists(dicdir): print(f"mecab-config에서 사전 경로 발견: {dicdir}") return dicdir - except (subprocess.CalledProcessError, FileNotFoundError, subprocess.TimeoutExpired): + except ( + subprocess.CalledProcessError, + FileNotFoundError, + subprocess.TimeoutExpired, + ): pass # 2. 플랫폼별 일반적인 경로들 확인 @@ -29,7 +34,7 @@ def detect_mecab_dicdir() -> Optional[str]: "/opt/homebrew/lib/mecab/dic/mecab-ko-dic", # Apple Silicon "/usr/local/lib/mecab/dic/mecab-ko-dic", # Intel Mac "/opt/homebrew/lib/mecab/dic/mecab-ipadic", # 기본 사전 - "/usr/local/lib/mecab/dic/mecab-ipadic" + "/usr/local/lib/mecab/dic/mecab-ipadic", ] elif system == "linux": candidate_paths = [ @@ -38,13 +43,13 @@ def detect_mecab_dicdir() -> Optional[str]: "/usr/local/lib/mecab/dic/mecab-ko-dic", "/usr/share/mecab/dic/mecab-ko-dic", "/usr/lib/mecab/dic/mecab-ipadic", - "/usr/local/lib/mecab/dic/mecab-ipadic" + "/usr/local/lib/mecab/dic/mecab-ipadic", ] elif system == "windows": candidate_paths = [ "C:/Program Files/MeCab/dic/mecab-ko-dic", "C:/mecab/dic/mecab-ko-dic", - "C:/Program Files/MeCab/dic/mecab-ipadic" + "C:/Program Files/MeCab/dic/mecab-ipadic", ] else: candidate_paths = [] @@ -60,6 +65,7 @@ def detect_mecab_dicdir() -> Optional[str]: return None + # 공통 설정을 위한 BaseSettings class BaseSettingsConfig(BaseSettings): @@ -74,14 +80,6 @@ class BaseSettingsConfig(BaseSettings): # MeCab 사전 경로 (자동 감지) mecab_path: Optional[str] = None - # 외부 서비스 계정 정보 - naver_id: Optional[str] = None - naver_password: Optional[str] = None - tistory_blog_name: Optional[str] = None - tistory_blog_url: Optional[str] = None - tistory_id: Optional[str] = None - tistory_password: Optional[str] = None - def __init__(self, **kwargs): super().__init__(**kwargs) @@ -94,22 +92,23 @@ def __init__(self, **kwargs): @property def db_url(self) -> str: """개별 필드를 사용하여 DB URL을 동적으로 생성""" - return( + return ( f"mysql+pymysql://{self.db_user}:" f"{self.db_pass}" f"@{self.db_host}:{self.db_port}/{self.db_name}" ) - model_config = SettingsConfigDict(env_file=['.env']) + model_config = SettingsConfigDict(env_file=[".env"]) # 환경별 설정 클래스 class DevSettings(BaseSettingsConfig): - model_config = SettingsConfigDict(env_file=['.env', '.env.dev']) + model_config = SettingsConfigDict(env_file=[".env", ".env.dev"]) class PrdSettings(BaseSettingsConfig): - model_config = SettingsConfigDict(env_file=['.env', '.env.prod']) + model_config = 
SettingsConfigDict(env_file=[".env", ".env.prod"]) + def get_settings() -> BaseSettingsConfig: """환경 변수에 따라 적절한 설정 객체를 반환하는 함수""" @@ -122,4 +121,4 @@ def get_settings() -> BaseSettingsConfig: raise ValueError(f"Invalid MODE environment variable: {mode}") -settings = get_settings() \ No newline at end of file +settings = get_settings() diff --git a/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py b/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py index a6152755..c783e8c8 100644 --- a/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py +++ b/apps/pre-processing-service/app/db/AsyncPostgreSQLManager.py @@ -44,11 +44,11 @@ def __init__(self): self._pool = None self._config = { - 'host': os.getenv('DB_HOST', '52.79.235.214'), - 'port': int(os.getenv('DB_PORT', 5432)), - 'database': os.getenv('DB_NAME', 'pre_process'), - 'user': os.getenv('DB_USER', 'postgres'), - 'password': os.getenv('DB_PASSWORD', 'qwer1234') + "host": os.getenv("DB_HOST", "52.79.235.214"), + "port": int(os.getenv("DB_PORT", 5432)), + "database": os.getenv("DB_NAME", "pre_process"), + "user": os.getenv("DB_USER", "postgres"), + "password": os.getenv("DB_PASSWORD", "qwer1234"), } self._initialized = True @@ -72,9 +72,7 @@ async def init_pool(self, min_size=5, max_size=20): if self._pool is None: self._pool = await asyncpg.create_pool( - min_size=min_size, - max_size=max_size, - **self._config + min_size=min_size, max_size=max_size, **self._config ) return self._pool @@ -182,8 +180,9 @@ async def close_pool(self): self._pool = None print("비동기 DB 연결 풀 전체 종료") + """ # 사용 예시 init_pool() - 애플리케이션 시작 시 단 한번만 호출 (main.py에서 실행, early startup) -""" \ No newline at end of file +""" diff --git a/apps/pre-processing-service/app/db/PostgreSQLManager.py b/apps/pre-processing-service/app/db/PostgreSQLManager.py index 606f7b5c..ca3ccede 100644 --- a/apps/pre-processing-service/app/db/PostgreSQLManager.py +++ b/apps/pre-processing-service/app/db/PostgreSQLManager.py @@ -5,6 +5,7 @@ import os import threading + class PostgreSQLManager: """ PostgreSQL 매니저 클래스 @@ -42,11 +43,11 @@ def __init__(self): self._pool = None self._config = { - 'host': os.getenv('DB_HOST', '52.79.235.214'), - 'port': int(os.getenv('DB_PORT', '5432')), - 'database': os.getenv('DB_NAME', 'pre_process'), - 'user': os.getenv('DB_USER', 'postgres'), - 'password': os.getenv('DB_PASSWORD', 'qwer1234') + "host": os.getenv("DB_HOST", "52.79.235.214"), + "port": int(os.getenv("DB_PORT", "5432")), + "database": os.getenv("DB_NAME", "pre_process"), + "user": os.getenv("DB_USER", "postgres"), + "password": os.getenv("DB_PASSWORD", "qwer1234"), } self._initialized = True @@ -130,6 +131,7 @@ def close_pool(self): self._pool = None print("DB 연결 풀 전체 종료") + """ # get_cursor 사용 예시 : 리소스 자동 정리 try: @@ -139,4 +141,4 @@ def close_pool(self): except Exception as e: print(f"에러 발생: {e}") # 자동으로 롤백, 커서 닫기, 커넥션 반환 수행 -""" \ No newline at end of file +""" diff --git a/apps/pre-processing-service/app/db/db_connecter.py b/apps/pre-processing-service/app/db/db_connecter.py index 2612cd65..027d924d 100644 --- a/apps/pre-processing-service/app/db/db_connecter.py +++ b/apps/pre-processing-service/app/db/db_connecter.py @@ -5,4 +5,4 @@ engine = create_engine( settings.db_url, pool_pre_ping=True, # 연결 유효성 체크 -) \ No newline at end of file +) diff --git a/apps/pre-processing-service/app/db/mariadb_manager.py b/apps/pre-processing-service/app/db/mariadb_manager.py index 225de471..63288b13 100644 --- a/apps/pre-processing-service/app/db/mariadb_manager.py +++ 
b/apps/pre-processing-service/app/db/mariadb_manager.py @@ -6,6 +6,7 @@ from dotenv import load_dotenv from dbutils.pooled_db import PooledDB + class MariadbManager: """ MariaDB 매니저 클래스 @@ -43,17 +44,20 @@ def __init__(self): return self._config = { - 'host': os.getenv('DB_HOST', 'localhost'), - 'port': int(os.getenv('DB_PORT', '3306')), - 'database': os.getenv('DB_NAME', 'pre_process'), - 'user': os.getenv('DB_USER', 'mariadb'), - 'password': os.getenv('DB_PASSWORD', 'qwer1234'), - 'autocommit': False + "host": os.getenv("DB_HOST", "localhost"), + "port": int(os.getenv("DB_PORT", "3306")), + "database": os.getenv("DB_NAME", "pre_process"), + "user": os.getenv("DB_USER", "mariadb"), + "password": os.getenv("DB_PASSWORD", "qwer1234"), + "autocommit": False, } - required_keys = ['host', 'database', 'user', 'password'] - missing = [k for k, v in self._config.items() - if k in required_keys and (v is None or v == '')] + required_keys = ["host", "database", "user", "password"] + missing = [ + k + for k, v in self._config.items() + if k in required_keys and (v is None or v == "") + ] if missing: raise ValueError(f"필수 데이터베이스 설정이 누락되었습니다: {missing}") @@ -79,7 +83,7 @@ def _init_pool(self, pool_size=20): maxusage=None, setsession=[], ping=0, - **config + **config, ) except pymysql.Error as e: raise Exception(f"MariaDB 커넥션 풀 초기화 실패: {e}") diff --git a/apps/pre-processing-service/app/decorators/logging.py b/apps/pre-processing-service/app/decorators/logging.py index 145cb0a0..23604a73 100644 --- a/apps/pre-processing-service/app/decorators/logging.py +++ b/apps/pre-processing-service/app/decorators/logging.py @@ -16,7 +16,7 @@ def log_api_call(func): async def wrapper(*args, **kwargs): # 1. request 객체를 안전하게 가져옵니다. # kwargs에서 'request'를 찾고, 없으면 args가 비어있지 않은 경우에만 args[0]을 시도합니다. - request: Request | None = kwargs.get('request') + request: Request | None = kwargs.get("request") if request is None and args and isinstance(args[0], Request): request = args[0] @@ -28,19 +28,17 @@ async def wrapper(*args, **kwargs): user_agent = request.headers.get("user-agent", "N/A") # 3. 요청 정보를 로그로 기록합니다. - log_context = { - "func": func.__name__, - "ip": client_ip, - "user_agent": user_agent - } + log_context = {"func": func.__name__, "ip": client_ip, "user_agent": user_agent} if request: - log_context.update({ - "url": str(request.url), - "method": request.method, - }) + log_context.update( + { + "url": str(request.url), + "method": request.method, + } + ) logger.info( "API 호출 시작: URL='{url}' 메서드='{method}' 함수='{func}' IP='{ip}' User-Agent='{user_agent}'", - **log_context + **log_context, ) else: logger.info("API 호출 시작: 함수='{func}'", **log_context) @@ -61,12 +59,12 @@ async def wrapper(*args, **kwargs): if request: logger.error( "API 호출 실패: URL='{url}' 메서드='{method}' IP='{ip}' 예외='{exception}' ({elapsed})", - **log_context + **log_context, ) else: logger.error( "API 호출 실패: 함수='{func}' 예외='{exception}' ({elapsed})", - **log_context + **log_context, ) raise # 예외를 다시 발생시켜 FastAPI가 처리하도록 합니다. 
finally: @@ -77,12 +75,11 @@ async def wrapper(*args, **kwargs): if request: logger.success( "API 호출 성공: URL='{url}' 메서드='{method}' IP='{ip}' ({elapsed})", - **log_context + **log_context, ) else: logger.success( - "API 호출 성공: 함수='{func}' ({elapsed})", - **log_context + "API 호출 성공: 함수='{func}' ({elapsed})", **log_context ) - return wrapper \ No newline at end of file + return wrapper diff --git a/apps/pre-processing-service/app/errors/BlogPostingException.py b/apps/pre-processing-service/app/errors/BlogPostingException.py index d8a70c2f..f0d49484 100644 --- a/apps/pre-processing-service/app/errors/BlogPostingException.py +++ b/apps/pre-processing-service/app/errors/BlogPostingException.py @@ -1,92 +1,110 @@ from app.errors.CustomException import CustomException from typing import List, Optional + class BlogLoginException(CustomException): """ 블로그 로그인 실패 예외 @:param platform: 로그인하려는 플랫폼 (네이버, 티스토리 등) @:param reason: 로그인 실패 이유 """ + def __init__(self, platform: str, reason: str = "인증 정보가 올바르지 않습니다"): super().__init__( status_code=401, detail=f"{platform} 로그인에 실패했습니다. {reason}", - code="BLOG_LOGIN_FAILED" + code="BLOG_LOGIN_FAILED", ) + class BlogPostPublishException(CustomException): """ 블로그 포스트 발행 실패 예외 @:param platform: 발행하려는 플랫폼 @:param reason: 발행 실패 이유 """ - def __init__(self, platform: str, reason: str = "포스트 발행 중 오류가 발생했습니다"): + + def __init__( + self, platform: str, reason: str = "포스트 발행 중 오류가 발생했습니다" + ): super().__init__( status_code=422, detail=f"{platform} 포스트 발행에 실패했습니다. {reason}", - code="BLOG_POST_PUBLISH_FAILED" + code="BLOG_POST_PUBLISH_FAILED", ) + class BlogContentValidationException(CustomException): """ 블로그 콘텐츠 유효성 검사 실패 예외 @:param field: 유효성 검사 실패한 필드 @:param reason: 실패 이유 """ + def __init__(self, field: str, reason: str): super().__init__( status_code=400, detail=f"콘텐츠 유효성 검사 실패: {field} - {reason}", - code="BLOG_CONTENT_VALIDATION_FAILED" + code="BLOG_CONTENT_VALIDATION_FAILED", ) + class BlogElementInteractionException(CustomException): """ 블로그 페이지 요소와의 상호작용 실패 예외 @:param element: 상호작용하려던 요소 @:param action: 수행하려던 액션 """ + def __init__(self, element: str, action: str): super().__init__( status_code=422, detail=f"블로그 페이지 요소 상호작용 실패: {element}에서 {action} 작업 실패", - code="BLOG_ELEMENT_INTERACTION_FAILED" + code="BLOG_ELEMENT_INTERACTION_FAILED", ) + class BlogServiceUnavailableException(CustomException): """ 블로그 서비스 이용 불가 예외 @:param platform: 이용 불가한 플랫폼 @:param reason: 이용 불가 이유 """ - def __init__(self, platform: str, reason: str = "서비스가 일시적으로 이용 불가합니다"): + + def __init__( + self, platform: str, reason: str = "서비스가 일시적으로 이용 불가합니다" + ): super().__init__( status_code=503, detail=f"{platform} 서비스 이용 불가: {reason}", - code="BLOG_SERVICE_UNAVAILABLE" + code="BLOG_SERVICE_UNAVAILABLE", ) + class BlogConfigurationException(CustomException): """ 블로그 서비스 설정 오류 예외 @:param config_item: 설정 오류 항목 """ + def __init__(self, config_item: str): super().__init__( status_code=500, detail=f"블로그 서비스 설정 오류: {config_item}", - code="BLOG_CONFIGURATION_ERROR" + code="BLOG_CONFIGURATION_ERROR", ) + class BloggerApiException(CustomException): """ Blogger API 관련 오류 예외 @:param reason: 실패 이유 @:param detail: 상세 오류 메시지 """ + def __init__(self, reason: str, detail: str): super().__init__( status_code=500, detail=f"Blogger API 오류: {reason} ({detail})", - code="BLOGGER_API_ERROR" - ) \ No newline at end of file + code="BLOGGER_API_ERROR", + ) diff --git a/apps/pre-processing-service/app/errors/CrawlingException.py b/apps/pre-processing-service/app/errors/CrawlingException.py index 1928e30f..4db0ff43 100644 --- 
a/apps/pre-processing-service/app/errors/CrawlingException.py +++ b/apps/pre-processing-service/app/errors/CrawlingException.py @@ -1,27 +1,31 @@ from app.errors.CustomException import CustomException from typing import List + class PageLoadTimeoutException(CustomException): """ 페이지 로드 타임아웃 예외 @:param url: 로드하려는 페이지의 URL """ - def __init__(self, url : str): + + def __init__(self, url: str): super().__init__( status_code=408, detail=f"페이지 로드가 시간 초과되었습니다. URL: {url}", - code="PAGE_LOAD_TIMEOUT" + code="PAGE_LOAD_TIMEOUT", ) + class WebDriverConnectionException(CustomException): """ 웹 드라이버 연결 실패 예외 """ + def __init__(self): super().__init__( status_code=500, detail="웹 드라이버 연결에 실패했습니다.", - code="WEBDRIVER_ERROR" + code="WEBDRIVER_ERROR", ) @@ -30,34 +34,38 @@ class ElementNotFoundException(CustomException): 특정 HTML 요소를 찾을 수 없는 예외 @:param selector: 찾으려는 요소의 CSS 선택자 """ + def __init__(self, selector: str): super().__init__( status_code=404, detail=f"요소를 찾을 수 없습니다. 선택자: {selector}", - code="ELEMENT_NOT_FOUND" + code="ELEMENT_NOT_FOUND", ) + class HtmlParsingException(CustomException): """ HTML 파싱 실패 예외 @:param reason: 파싱 실패 이유 """ + def __init__(self, reason: str): super().__init__( status_code=422, detail=f"HTML 파싱에 실패했습니다. 이유: {reason}", - code="HTML_PARSING_ERROR" + code="HTML_PARSING_ERROR", ) + class DataExtractionException(CustomException): """ 데이터 추출 실패 예외 @:param field: 추출하려는 데이터 필드 목록 """ + def __init__(self, field: List[str]): super().__init__( status_code=422, detail=f"데이터 추출에 실패했습니다. 필드: {', '.join(field)}", - code="DATA_EXTRACTION_ERROR" + code="DATA_EXTRACTION_ERROR", ) - diff --git a/apps/pre-processing-service/app/errors/CustomException.py b/apps/pre-processing-service/app/errors/CustomException.py index 4c3f84a3..0ae08734 100644 --- a/apps/pre-processing-service/app/errors/CustomException.py +++ b/apps/pre-processing-service/app/errors/CustomException.py @@ -3,42 +3,49 @@ class CustomException(Exception): """ 개발자가 비지니스 로직에 맞게 의도적으로 에러를 정의 """ + def __init__(self, status_code: int, detail: str, code: str): self.status_code = status_code self.detail = detail self.code = code + # 구체적인 커스텀 예외 정의 class ItemNotFoundException(CustomException): """ 아이템을 찾을수 없는 예외 @:param item_id: 찾을수 없는 아이템의 ID """ + def __init__(self, item_id: int): super().__init__( status_code=404, detail=f"{item_id}를 찾을수 없습니다.", - code="ITEM_NOT_FOUND" + code="ITEM_NOT_FOUND", ) + class InvalidItemDataException(CustomException): """ 데이터 유효성 검사 실패 예외 """ + def __init__(self): super().__init__( status_code=422, detail="데이터가 유효하지않습니다..", - code="INVALID_ITEM_DATA" + code="INVALID_ITEM_DATA", ) + class DatabaseConnectionException(CustomException): """ 데이터베이스 연결 실패 예외 """ + def __init__(self): super().__init__( status_code=500, detail="데이터베이스 연결에 실패했습니다.", - code="DATABASE_CONNECTION_ERROR" - ) \ No newline at end of file + code="DATABASE_CONNECTION_ERROR", + ) diff --git a/apps/pre-processing-service/app/errors/handlers.py b/apps/pre-processing-service/app/errors/handlers.py index 1b5caf3d..882a6078 100644 --- a/apps/pre-processing-service/app/errors/handlers.py +++ b/apps/pre-processing-service/app/errors/handlers.py @@ -6,15 +6,18 @@ from .messages import ERROR_MESSAGES, get_error_message from ..errors.CustomException import CustomException + class ErrorBaseModel(BaseModel): """ 모든 에러 응답의 기반이 되는 Pydantic 모델. API의 에러 응답 형식을 통일하는 역할을 합니다. 
""" + status_code: int detail: str code: str + # CustomException 핸들러 async def custom_exception_handler(request: Request, exc: CustomException): """ @@ -22,9 +25,7 @@ async def custom_exception_handler(request: Request, exc: CustomException): """ # 변경점: ErrorBaseModel을 사용하여 응답 본문 생성 error_content = ErrorBaseModel( - status_code=exc.status_code, - detail=exc.detail, - code=exc.code + status_code=exc.status_code, detail=exc.detail, code=exc.code ) return JSONResponse( status_code=exc.status_code, @@ -41,9 +42,7 @@ async def http_exception_handler(request: Request, exc: StarletteHTTPException): # 변경점: ErrorBaseModel을 사용하여 응답 본문 생성 error_content = ErrorBaseModel( - status_code=exc.status_code, - detail=message, - code=f"HTTP_{exc.status_code}" + status_code=exc.status_code, detail=message, code=f"HTTP_{exc.status_code}" ) return JSONResponse( status_code=exc.status_code, @@ -60,7 +59,7 @@ async def validation_exception_handler(request: Request, exc: RequestValidationE base_error = ErrorBaseModel( status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=ERROR_MESSAGES[status.HTTP_422_UNPROCESSABLE_ENTITY], - code="VALIDATION_ERROR" + code="VALIDATION_ERROR", ) # 모델의 내용과 추가적인 'details' 필드를 결합 @@ -82,7 +81,7 @@ async def unhandled_exception_handler(request: Request, exc: Exception): error_content = ErrorBaseModel( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=ERROR_MESSAGES[status.HTTP_500_INTERNAL_SERVER_ERROR], - code="INTERNAL_SERVER_ERROR" + code="INTERNAL_SERVER_ERROR", ) return JSONResponse( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/apps/pre-processing-service/app/main.py b/apps/pre-processing-service/app/main.py index d13c523d..9865d845 100644 --- a/apps/pre-processing-service/app/main.py +++ b/apps/pre-processing-service/app/main.py @@ -12,11 +12,7 @@ from app.errors.handlers import * # --- FastAPI 애플리케이션 인스턴스 생성 --- -app = FastAPI( - title="pre-processing-service", - description="", - version="1.0.0" -) +app = FastAPI(title="pre-processing-service", description="", version="1.0.0") # --- 예외 핸들러 등록 --- # 등록 순서가 중요합니다: 구체적인 예외부터 등록하고 가장 일반적인 예외(Exception)를 마지막에 등록합니다. 
diff --git a/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py b/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py index bbaa2cfd..d18630f6 100644 --- a/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py +++ b/apps/pre-processing-service/app/middleware/BackServiceLoggerDependency.py @@ -121,4 +121,4 @@ # "NAVER_CRAWLING", # track_params=["job_id", "schedule_id", "tag", "category", "startDate", "endDate"], # response_trackers=["keyword", "total_keyword"] -# ) \ No newline at end of file +# ) diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py index edb13f8b..acb120fa 100644 --- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py +++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py @@ -9,7 +9,7 @@ import json import time -trace_id_context: ContextVar[str] = ContextVar('trace_id', default="NO_TRACE_ID") +trace_id_context: ContextVar[str] = ContextVar("trace_id", default="NO_TRACE_ID") class ServiceLoggerMiddleware(BaseHTTPMiddleware): @@ -37,14 +37,35 @@ def _default_mappings(self) -> Dict[str, Dict]: return { "/keywords/search": { "service_type": "NAVER_CRAWLING", - "track_params": ["keyword", "category", "startDate", "endDate", "job_id", "schedule_id"], - "response_trackers": ["keyword", "total_keywords", "results_count"] + "track_params": [ + "keyword", + "category", + "startDate", + "endDate", + "job_id", + "schedule_id", + ], + "response_trackers": ["keyword", "total_keywords", "results_count"], }, "/blogs/publish": { - "service_type": "BLOG_PUBLISH", - "track_params": ["tag", "title", "content", "tags", "job_id", "schedule_id", "schedule_his_id"], - "response_trackers": ["job_id", "schedule_id", "schedule_his_id", "status", "metadata"] - } + "service_type": "BLOG_PUBLISH", + "track_params": [ + "tag", + "title", + "content", + "tags", + "job_id", + "schedule_id", + "schedule_his_id", + ], + "response_trackers": [ + "job_id", + "schedule_id", + "schedule_his_id", + "status", + "metadata", + ], + }, } async def dispatch(self, request: Request, call_next): @@ -77,8 +98,12 @@ async def dispatch(self, request: Request, call_next): # 4. 
성공 로깅 if 200 <= response.status_code < 300: await self._log_success_response( - service_type, trace_id, start_time, param_str, - response, service_config["response_trackers"] + service_type, + trace_id, + start_time, + param_str, + response, + service_config["response_trackers"], ) else: await self._log_error_response( @@ -102,9 +127,11 @@ def _get_service_config(self, url_path: str) -> Optional[Dict]: def _match_pattern(self, url_path: str, pattern: str) -> bool: """URL 패턴 매칭 (간단한 구현, 필요시 정규식으로 확장 가능)""" # 정확히 일치하거나 패턴이 접두사인 경우 - return url_path == pattern or url_path.startswith(pattern.rstrip('*')) + return url_path == pattern or url_path.startswith(pattern.rstrip("*")) - async def _extract_params(self, request: Request, track_params: List[str]) -> Dict[str, Any]: + async def _extract_params( + self, request: Request, track_params: List[str] + ) -> Dict[str, Any]: """요청에서 추적 파라미터 추출""" params = {} @@ -137,9 +164,15 @@ async def _extract_params(self, request: Request, track_params: List[str]) -> Di return params - async def _log_success_response(self, service_type: str, trace_id: str, - start_time: float, param_str: str, - response: Response, response_trackers: List[str]): + async def _log_success_response( + self, + service_type: str, + trace_id: str, + start_time: float, + param_str: str, + response: Response, + response_trackers: List[str], + ): """성공 응답 로깅""" duration = time.time() - start_time @@ -147,16 +180,16 @@ async def _log_success_response(self, service_type: str, trace_id: str, f"[{service_type}_SUCCESS]", f"trace_id={trace_id}", f"execution_time={duration:.4f}s{param_str}", - f"status_code={response.status_code}" + f"status_code={response.status_code}", ] # 응답 데이터에서 추적 정보 추출 if isinstance(response, JSONResponse) and response_trackers: try: # JSONResponse body 읽기 - if hasattr(response, 'body'): + if hasattr(response, "body"): response_data = json.loads(response.body.decode()) - elif hasattr(response, 'content'): + elif hasattr(response, "content"): response_data = response.content else: response_data = None @@ -167,7 +200,9 @@ async def _log_success_response(self, service_type: str, trace_id: str, if tracker in response_data: value = response_data[tracker] if isinstance(value, dict): - response_params.append(f"{tracker}_keys={list(value.keys())}") + response_params.append( + f"{tracker}_keys={list(value.keys())}" + ) response_params.append(f"{tracker}_count={len(value)}") elif isinstance(value, list): response_params.append(f"{tracker}_count={len(value)}") @@ -182,8 +217,14 @@ async def _log_success_response(self, service_type: str, trace_id: str, logger.info(" ".join(log_parts)) - async def _log_error_response(self, service_type: str, trace_id: str, - start_time: float, param_str: str, response: Response): + async def _log_error_response( + self, + service_type: str, + trace_id: str, + start_time: float, + param_str: str, + response: Response, + ): """에러 응답 로깅""" duration = time.time() - start_time logger.error( @@ -192,12 +233,18 @@ async def _log_error_response(self, service_type: str, trace_id: str, f"status_code={response.status_code}" ) - async def _log_exception(self, service_type: str, trace_id: str, - start_time: float, param_str: str, exception: Exception): + async def _log_exception( + self, + service_type: str, + trace_id: str, + start_time: float, + param_str: str, + exception: Exception, + ): """예외 로깅""" duration = time.time() - start_time logger.error( f"[{service_type}_EXCEPTION] trace_id={trace_id} " f"execution_time={duration:.4f}s{param_str} " 
f"exception={str(exception)}" - ) \ No newline at end of file + ) diff --git a/apps/pre-processing-service/app/middleware/logging.py b/apps/pre-processing-service/app/middleware/logging.py index 29cbe738..9a8cb6a0 100644 --- a/apps/pre-processing-service/app/middleware/logging.py +++ b/apps/pre-processing-service/app/middleware/logging.py @@ -1,4 +1,3 @@ - import time from fastapi import Request from loguru import logger @@ -12,7 +11,9 @@ async def dispatch(self, request: Request, call_next): # 1. 요청 시작 로그 logger.info( "요청 시작: IP='{}' 메서드='{}' URL='{}'", - request.client.host, request.method, request.url.path + request.client.host, + request.method, + request.url.path, ) try: @@ -23,7 +24,10 @@ async def dispatch(self, request: Request, call_next): process_time = time.time() - start_time logger.info( "요청 성공: 메서드='{}' URL='{}' 상태코드='{}' (처리 시간: {:.4f}s)", - request.method, request.url.path, response.status_code, process_time + request.method, + request.url.path, + response.status_code, + process_time, ) return response @@ -32,7 +36,11 @@ async def dispatch(self, request: Request, call_next): process_time = time.time() - start_time logger.error( "요청 실패: IP='{}' 메서드='{}' URL='{}' 예외='{}' (처리 시간: {:.4f}s)", - request.client.host, request.method, request.url.path, e, process_time + request.client.host, + request.method, + request.url.path, + e, + process_time, ) # 예외를 다시 발생시켜 FastAPI의 기본 핸들러가 처리하도록 함 - raise \ No newline at end of file + raise diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index b3982638..61720cb6 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -5,87 +5,154 @@ # 기본 요청 class RequestBase(BaseModel): - job_id: int = Field(..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자") - schedule_id: int = Field(..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자") - schedule_his_id: Optional[int] = Field(None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자") + job_id: int = Field( + ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" + ) + schedule_id: int = Field( + ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" + ) + schedule_his_id: Optional[int] = Field( + None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" + ) + # 기본 응답 class ResponseBase(BaseModel): - job_id: int = Field(..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자") - schedule_id: int = Field(..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자") - schedule_his_id: Optional[int] = Field(None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자") + job_id: int = Field( + ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" + ) + schedule_id: int = Field( + ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" + ) + schedule_his_id: Optional[int] = Field( + None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" + ) status: str = Field(..., title="상태", description="요청 처리 상태") + # 네이버 키워드 추출 class RequestNaverSearch(RequestBase): tag: str = Field(..., title="태그", description="데이터랩/스토어 태그 구분") - category: Optional[str] = Field(None, title="카테고리", description="검색할 카테고리") - start_date: Optional[str] = Field(None, title="시작일", description="검색 시작 날짜 (YYYY-MM-DD)") - end_date: Optional[str] = Field(None, title="종료일", description="검색 종료 날짜 (YYYY-MM-DD)") + category: Optional[str] = Field( + None, title="카테고리", description="검색할 카테고리" + ) + start_date: Optional[str] = Field( + None, title="시작일", description="검색 시작 날짜 (YYYY-MM-DD)" + ) + end_date: Optional[str] = Field( + None, 
title="종료일", description="검색 종료 날짜 (YYYY-MM-DD)" + ) + class ResponseNaverSearch(ResponseBase): category: Optional[str] = Field(None, title="카테고리", description="검색 카테고리") keyword: str = Field(..., title="키워드", description="검색에 사용된 키워드") - total_keyword: Dict[int, str] = Field(..., title="총 키워드", description="키워드별 총 검색 결과") + total_keyword: Dict[int, str] = Field( + ..., title="총 키워드", description="키워드별 총 검색 결과" + ) + # 2단계: 검색 class RequestSadaguSearch(RequestBase): keyword: str = Field(..., title="검색 키워드", description="상품을 검색할 키워드") + class ResponseSadaguSearch(ResponseBase): keyword: str = Field(..., title="검색 키워드", description="검색에 사용된 키워드") - search_results: List[Dict] = Field(..., title="검색 결과", description="검색된 상품 목록") + search_results: List[Dict] = Field( + ..., title="검색 결과", description="검색된 상품 목록" + ) + # 3단계: 매칭 class RequestSadaguMatch(RequestBase): keyword: str = Field(..., title="매칭 키워드", description="상품과 매칭할 키워드") - search_results: List[Dict] = Field(..., title="검색 결과", description="이전 단계에서 검색된 상품 목록") + search_results: List[Dict] = Field( + ..., title="검색 결과", description="이전 단계에서 검색된 상품 목록" + ) + class ResponseSadaguMatch(ResponseBase): keyword: str = Field(..., title="매칭 키워드", description="매칭에 사용된 키워드") - matched_products: List[Dict] = Field(..., title="매칭된 상품", description="키워드와 매칭된 상품 목록") + matched_products: List[Dict] = Field( + ..., title="매칭된 상품", description="키워드와 매칭된 상품 목록" + ) + # 4단계: 유사도 class RequestSadaguSimilarity(RequestBase): - keyword: str = Field(..., title="유사도 분석 키워드", description="유사도 분석할 키워드") - matched_products: List[Dict] = Field(..., title="매칭된 상품", description="이전 단계에서 매칭된 상품 목록") - search_results: Optional[List[Dict]] = Field(None, title="검색 결과", description="매칭 실패시 사용할 전체 검색 결과 (폴백용)") + keyword: str = Field( + ..., title="유사도 분석 키워드", description="유사도 분석할 키워드" + ) + matched_products: List[Dict] = Field( + ..., title="매칭된 상품", description="이전 단계에서 매칭된 상품 목록" + ) + search_results: Optional[List[Dict]] = Field( + None, + title="검색 결과", + description="매칭 실패시 사용할 전체 검색 결과 (폴백용)", + ) + class ResponseSadaguSimilarity(ResponseBase): - keyword: str = Field(..., title="분석 키워드", description="유사도 분석에 사용된 키워드") - selected_product: Optional[Dict] = Field(None, title="선택된 상품", description="유사도 분석 결과 선택된 상품") - reason: Optional[str] = Field(None, title="선택 이유", description="상품 선택 근거 및 점수 정보") + keyword: str = Field( + ..., title="분석 키워드", description="유사도 분석에 사용된 키워드" + ) + selected_product: Optional[Dict] = Field( + None, title="선택된 상품", description="유사도 분석 결과 선택된 상품" + ) + reason: Optional[str] = Field( + None, title="선택 이유", description="상품 선택 근거 및 점수 정보" + ) + # 사다구몰 크롤링 class RequestSadaguCrawl(RequestBase): - tag: str = Field(..., title="크롤링 태그", description="크롤링 유형을 구분하는 태그 (예: 'detail')") - product_url: HttpUrl = Field(..., title="상품 URL", description="크롤링할 상품 페이지의 URL") + tag: str = Field( + ..., + title="크롤링 태그", + description="크롤링 유형을 구분하는 태그 (예: 'detail')", + ) + product_url: HttpUrl = Field( + ..., title="상품 URL", description="크롤링할 상품 페이지의 URL" + ) + class ResponseSadaguCrawl(ResponseBase): tag: str = Field(..., title="크롤링 태그", description="크롤링 유형 태그") product_url: str = Field(..., title="상품 URL", description="크롤링된 상품 URL") - product_detail: Optional[Dict] = Field(None, title="상품 상세정보", description="크롤링된 상품의 상세 정보") - crawled_at: Optional[str] = Field(None, title="크롤링 시간", description="크롤링 완료 시간") + product_detail: Optional[Dict] = Field( + None, title="상품 상세정보", description="크롤링된 상품의 상세 정보" + ) + crawled_at: Optional[str] = Field( + None, 
title="크롤링 시간", description="크롤링 완료 시간" + ) + # 블로그 콘텐츠 생성 class RequestBlogCreate(RequestBase): pass + class ResponseBlogCreate(ResponseBase): pass + # 블로그 배포 class RequestBlogPublish(RequestBase): tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류") - blog_id: str = Field(..., description= "블로그 아이디") - blog_pw: str = Field(..., description= "블로그 비밀번호") - post_title: str = Field(..., description= "포스팅 제목") - post_content: str = Field(..., description= "포스팅 내용") - post_tags: List[str] = Field(default=[], description= "포스팅 태그 목록") + blog_id: str = Field(..., description="블로그 아이디") + blog_pw: str = Field(..., description="블로그 비밀번호") + post_title: str = Field(..., description="포스팅 제목") + post_content: str = Field(..., description="포스팅 내용") + post_tags: List[str] = Field(default=[], description="포스팅 태그 목록") + class ResponseBlogPublish(ResponseBase): # 디버깅 용 - metadata: Optional[Dict[str, Any]] = Field(None, description= "포스팅 관련 메타데이터") + metadata: Optional[Dict[str, Any]] = Field( + None, description="포스팅 관련 메타데이터" + ) # 프로덕션 용 - # post_url: str = Field(..., description="포스팅 URL") \ No newline at end of file + # post_url: str = Field(..., description="포스팅 URL") diff --git a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py index b28c1081..ff4b2754 100644 --- a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py @@ -74,7 +74,9 @@ def _get_platform_name(self) -> str: pass @abstractmethod - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """ 공통 유효성 검사 로직 :param title: 포스트 제목 @@ -105,10 +107,10 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict "platform": self._get_platform_name(), "title": title, "content_length": len(content), - "tags": tags or [] + "tags": tags or [], } def __del__(self): """공통 리소스 정리""" - if hasattr(self, 'web_driver') and self.web_driver: - self.web_driver.quit() \ No newline at end of file + if hasattr(self, "web_driver") and self.web_driver: + self.web_driver.quit() diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py index cd5d1126..07e337d9 100644 --- a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py @@ -24,7 +24,7 @@ def __init__(self, config_file="blog_config.json"): self.config_file = config_file self.blogger_service = None self.blog_id = None - self.scopes = ['https://www.googleapis.com/auth/blogger'] + self.scopes = ["https://www.googleapis.com/auth/blogger"] def _requires_webdriver(self) -> bool: """API 기반 서비스는 WebDriver가 필요하지 않음""" @@ -35,18 +35,18 @@ def _load_config(self) -> None: 플랫폼별 설정 로드 """ try: - with open(self.config_file, 'r', encoding='utf-8') as f: + with open(self.config_file, "r", encoding="utf-8") as f: self.config = json.load(f) - self.current_upload_account = self.config['upload_account'] + self.current_upload_account = self.config["upload_account"] except FileNotFoundError: default_config = { "upload_account": "your_account@gmail.com", - "credentials": "credentials.json" + "credentials": "credentials.json", } - with open(self.config_file, 'w', 
encoding='utf-8') as f: + with open(self.config_file, "w", encoding="utf-8") as f: json.dump(default_config, f, indent=2) self.config = default_config - self.current_upload_account = self.config['upload_account'] + self.current_upload_account = self.config["upload_account"] def _login(self) -> None: """ @@ -63,7 +63,7 @@ def _authenticate_api(self): try: creds = None if os.path.exists(token_file): - with open(token_file, 'rb') as token: + with open(token_file, "rb") as token: creds = pickle.load(token) if not creds or not creds.valid: @@ -72,18 +72,18 @@ def _authenticate_api(self): else: print(f"새 API 인증이 필요합니다: {self.current_upload_account}") flow = InstalledAppFlow.from_client_secrets_file( - self.config['credentials'], self.scopes + self.config["credentials"], self.scopes ) creds = flow.run_local_server(port=0) - with open(token_file, 'wb') as token: + with open(token_file, "wb") as token: pickle.dump(creds, token) - self.blogger_service = build('blogger', 'v3', credentials=creds) + self.blogger_service = build("blogger", "v3", credentials=creds) - blogs = self.blogger_service.blogs().listByUser(userId='self').execute() - if blogs.get('items'): - self.blog_id = blogs['items'][0]['id'] + blogs = self.blogger_service.blogs().listByUser(userId="self").execute() + if blogs.get("items"): + self.blog_id = blogs["items"][0]["id"] print(f"API 설정 완료 - 블로그: {blogs['items'][0]['name']}") return True else: @@ -100,30 +100,28 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No if not self.blogger_service or not self.blog_id: self._authenticate_api() - post_data = { - 'title': title, - 'content': content, - 'labels': tags or [] - } + post_data = {"title": title, "content": content, "labels": tags or []} try: - result = self.blogger_service.posts().insert( - blogId=self.blog_id, - body=post_data - ).execute() + result = ( + self.blogger_service.posts() + .insert(blogId=self.blog_id, body=post_data) + .execute() + ) print(f"포스트 생성 완료: {result.get('url')}") except Exception as e: raise BlogPostPublishException( - platform="Blogger", - reason="API 통신 중 오류가 발생했습니다." + platform="Blogger", reason="API 통신 중 오류가 발생했습니다." 
) from e def _get_platform_name(self) -> str: """플랫폼 이름 반환""" return "Blogger" - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """ 공통 유효성 검사 로직 """ @@ -142,4 +140,4 @@ def __del__(self): 리소스 정리 - API 기반 서비스는 별도 정리 불필요 부모 클래스의 __del__이 WebDriver 정리를 처리 """ - super().__del__() \ No newline at end of file + super().__del__() diff --git a/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py b/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py index 0aaf9431..0e33a9fd 100644 --- a/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/naver_blog_post_service.py @@ -11,6 +11,7 @@ from app.errors.BlogPostingException import * from app.service.blog.base_blog_post_service import BaseBlogPostService + class NaverBlogPostService(BaseBlogPostService): """네이버 블로그 포스팅 서비스 구현""" @@ -25,7 +26,9 @@ def _load_config(self) -> None: def _get_platform_name(self) -> str: return "NAVER_BLOG" - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """공통 유효성 검사 로직""" if not title or not title.strip(): @@ -53,7 +56,7 @@ def _login(self) -> None: pyperclip.copy(self.id) time.sleep(1) - id_input.send_keys(Keys.COMMAND, 'v') + id_input.send_keys(Keys.COMMAND, "v") time.sleep(1) # 비밀번호 입력 @@ -66,7 +69,7 @@ def _login(self) -> None: pyperclip.copy(self.password) time.sleep(1) - password_input.send_keys(Keys.COMMAND, 'v') + password_input.send_keys(Keys.COMMAND, "v") time.sleep(1) # 로그인 버튼 클릭 @@ -84,7 +87,9 @@ def _login(self) -> None: except TimeoutException: raise PageLoadTimeoutException(self.login_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("네이버 블로그", "네트워크 연결 오류 또는 페이지 로드 실패") + raise BlogServiceUnavailableException( + "네이버 블로그", "네트워크 연결 오류 또는 페이지 로드 실패" + ) except Exception as e: raise BlogLoginException("네이버 블로그", f"예상치 못한 오류: {str(e)}") @@ -102,7 +107,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # 기존 작성 글 팝업 닫기 (있을 경우) try: cancel = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '.se-popup-button.se-popup-button-cancel')) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, ".se-popup-button.se-popup-button-cancel") + ) ) cancel.click() time.sleep(1) @@ -112,10 +119,13 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # 제목 입력 try: title_element = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '.se-placeholder.__se_placeholder.se-fs32')) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, ".se-placeholder.__se_placeholder.se-fs32") + ) ) - ActionChains(self.web_driver).move_to_element(title_element).click().pause(0.2).send_keys( - title).perform() + ActionChains(self.web_driver).move_to_element( + title_element + ).click().pause(0.2).send_keys(title).perform() time.sleep(1) except TimeoutException: raise BlogElementInteractionException("제목 입력 필드", "제목 입력") @@ -123,10 +133,15 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # 본문 입력 try: body_element = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '.se-component.se-text.se-l-default')) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, 
".se-component.se-text.se-l-default") + ) ) - ActionChains(self.web_driver).move_to_element(body_element).click().pause(0.2) \ - .send_keys(content).pause(0.2).send_keys(Keys.ENTER).perform() + ActionChains(self.web_driver).move_to_element( + body_element + ).click().pause(0.2).send_keys(content).pause(0.2).send_keys( + Keys.ENTER + ).perform() time.sleep(1) except TimeoutException: raise BlogElementInteractionException("본문 입력 필드", "본문 입력") @@ -134,7 +149,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No # 발행 버튼 클릭 try: publish_btn = self.wait_driver.until( - EC.element_to_be_clickable((By.XPATH, "//button[.//span[normalize-space()='발행']]")) + EC.element_to_be_clickable( + (By.XPATH, "//button[.//span[normalize-space()='발행']]") + ) ) try: publish_btn.click() @@ -148,7 +165,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No if tags: try: tag_input = self.wait_driver.until( - EC.element_to_be_clickable((By.CSS_SELECTOR, "input[placeholder*='태그']")) + EC.element_to_be_clickable( + (By.CSS_SELECTOR, "input[placeholder*='태그']") + ) ) for tag in tags: tag_input.send_keys(tag) @@ -161,8 +180,12 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No try: time.sleep(1) final_btn = self.wait_driver.until( - EC.element_to_be_clickable((By.XPATH, - "//div[contains(@class,'layer') or contains(@class,'popup') or @role='dialog']//*[self::button or self::a][.//span[normalize-space()='발행']]")) + EC.element_to_be_clickable( + ( + By.XPATH, + "//div[contains(@class,'layer') or contains(@class,'popup') or @role='dialog']//*[self::button or self::a][.//span[normalize-space()='발행']]", + ) + ) ) try: final_btn.click() @@ -178,7 +201,7 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No EC.url_contains("PostView.naver"), EC.url_contains("postList"), EC.url_contains("postList.naver"), - EC.url_contains("entry.naver") + EC.url_contains("entry.naver"), ) ) except TimeoutException: @@ -189,6 +212,10 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No except TimeoutException: raise PageLoadTimeoutException(self.post_content_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("네이버 블로그", "페이지 로드 중 네트워크 오류") + raise BlogServiceUnavailableException( + "네이버 블로그", "페이지 로드 중 네트워크 오류" + ) except Exception as e: - raise BlogPostPublishException("네이버 블로그", f"예상치 못한 오류: {str(e)}") + raise BlogPostPublishException( + "네이버 블로그", f"예상치 못한 오류: {str(e)}" + ) diff --git a/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py b/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py index bcb2abaf..cc830bac 100644 --- a/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/tistory_blog_post_service.py @@ -9,6 +9,7 @@ from app.errors.BlogPostingException import * from app.service.blog.base_blog_post_service import BaseBlogPostService + class TistoryBlogPostService(BaseBlogPostService): """티스토리 블로그 포스팅 서비스""" @@ -24,7 +25,9 @@ def _load_config(self) -> None: def _get_platform_name(self) -> str: return "TISTORY_BLOG" - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """공통 유효성 검사 로직""" if not title or not title.strip(): @@ -81,7 +84,9 @@ def _login(self) -> None: except TimeoutException: 
raise PageLoadTimeoutException(self.login_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("티스토리 블로그", "네트워크 연결 오류 또는 페이지 로드 실패") + raise BlogServiceUnavailableException( + "티스토리 블로그", "네트워크 연결 오류 또는 페이지 로드 실패" + ) except Exception as e: raise BlogLoginException("티스토리 블로그", f"예상치 못한 오류: {str(e)}") @@ -107,7 +112,11 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No try: iframe = self.wait_driver.until( EC.presence_of_element_located( - (By.XPATH, "//iframe[contains(@title, 'Rich Text Area') or contains(@id, 'editor')]")) + ( + By.XPATH, + "//iframe[contains(@title, 'Rich Text Area') or contains(@id, 'editor')]", + ) + ) ) self.web_driver.switch_to.frame(iframe) @@ -125,13 +134,15 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No content_selectors = [ "//div[@contenteditable='true']", "//textarea[contains(@class, 'editor')]", - "//div[contains(@class, 'editor')]" + "//div[contains(@class, 'editor')]", ] content_area = None for selector in content_selectors: try: - content_area = self.web_driver.find_element(By.XPATH, selector) + content_area = self.web_driver.find_element( + By.XPATH, selector + ) break except: continue @@ -140,7 +151,9 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No content_area.clear() content_area.send_keys(content) else: - raise BlogElementInteractionException("본문 입력 필드", "본문 입력") + raise BlogElementInteractionException( + "본문 입력 필드", "본문 입력" + ) except Exception: raise BlogElementInteractionException("본문 입력 필드", "본문 입력") @@ -150,7 +163,11 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No try: tag_input = self.wait_driver.until( EC.presence_of_element_located( - (By.XPATH, "//input[@placeholder='태그입력' or contains(@placeholder, '태그')]")) + ( + By.XPATH, + "//input[@placeholder='태그입력' or contains(@placeholder, '태그')]", + ) + ) ) tag_input.clear() @@ -192,27 +209,37 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No publish_selectors = [ "//button[contains(text(), '발행')]", "//button[contains(text(), '저장')]", - "//*[@class='btn_publish' or contains(@class, 'publish')]" + "//*[@class='btn_publish' or contains(@class, 'publish')]", ] for selector in publish_selectors: try: - publish_btn = self.web_driver.find_element(By.XPATH, selector) + publish_btn = self.web_driver.find_element( + By.XPATH, selector + ) publish_btn.click() break except: continue else: - raise BlogPostPublishException("티스토리 블로그", "발행 버튼을 찾을 수 없습니다") + raise BlogPostPublishException( + "티스토리 블로그", "발행 버튼을 찾을 수 없습니다" + ) except Exception: - raise BlogPostPublishException("티스토리 블로그", "발행 과정에서 오류가 발생했습니다") + raise BlogPostPublishException( + "티스토리 블로그", "발행 과정에서 오류가 발생했습니다" + ) except (BlogElementInteractionException, BlogPostPublishException): raise except TimeoutException: raise PageLoadTimeoutException(self.post_content_url) except WebDriverConnectionException: - raise BlogServiceUnavailableException("티스토리 블로그", "페이지 로드 중 네트워크 오류") + raise BlogServiceUnavailableException( + "티스토리 블로그", "페이지 로드 중 네트워크 오류" + ) except Exception as e: - raise BlogPostPublishException("티스토리 블로그", f"예상치 못한 오류: {str(e)}") + raise BlogPostPublishException( + "티스토리 블로그", f"예상치 못한 오류: {str(e)}" + ) diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 829c5a4b..52f68578 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ 
b/apps/pre-processing-service/app/service/crawl_service.py @@ -17,20 +17,23 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: crawler = DetailCrawler(use_selenium=True) try: - logger.info(f"상품 상세 크롤링 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, product_url={request.product_url}") + logger.info( + f"상품 상세 크롤링 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, product_url={request.product_url}" + ) # 상세 정보 크롤링 실행 product_detail = await crawler.crawl_detail( - product_url=str(request.product_url), - include_images=False + product_url=str(request.product_url), include_images=False ) if not product_detail: logger.error(f"상품 상세 정보 크롤링 실패: url={request.product_url}") raise InvalidItemDataException("상품 상세 정보 크롤링 실패") - product_title = product_detail.get('title', 'Unknown')[:50] - logger.success(f"크롤링 완료: title='{product_title}', price={product_detail.get('price', 0)}, options_count={len(product_detail.get('options', []))}") + product_title = product_detail.get("title", "Unknown")[:50] + logger.success( + f"크롤링 완료: title='{product_title}', price={product_detail.get('price', 0)}, options_count={len(product_detail.get('options', []))}" + ) # 응답 데이터 구성 response_data = { @@ -41,15 +44,19 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: "product_url": str(request.product_url), "product_detail": product_detail, "status": "success", - "crawled_at": time.strftime('%Y-%m-%d %H:%M:%S') + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), } - logger.info(f"상품 상세 크롤링 서비스 완료: job_id={request.job_id}, status=success") + logger.info( + f"상품 상세 크롤링 서비스 완료: job_id={request.job_id}, status=success" + ) return response_data except Exception as e: - logger.error(f"크롤링 서비스 오류: job_id={request.job_id}, product_url={request.product_url}, error='{e}'") + logger.error( + f"크롤링 서비스 오류: job_id={request.job_id}, product_url={request.product_url}, error='{e}'" + ) raise InvalidItemDataException(f"상품 상세 크롤링 오류: {e}") finally: await crawler.close() - logger.debug("크롤러 리소스 정리 완료") \ No newline at end of file + logger.debug("크롤러 리소스 정리 완료") diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py index da39aac9..575767ee 100644 --- a/apps/pre-processing-service/app/service/keyword_service.py +++ b/apps/pre-processing-service/app/service/keyword_service.py @@ -8,18 +8,21 @@ from ..errors.CustomException import InvalidItemDataException from ..model.schemas import RequestNaverSearch + async def keyword_search(request: RequestNaverSearch) -> dict: """ 네이버 검색 요청을 처리하는 비즈니스 로직입니다. 입력받은 데이터를 기반으로 응답 데이터를 생성하여 딕셔너리로 반환합니다. 
""" - #키워드 검색 + # 키워드 검색 if request.tag == "naver": - trending_keywords = await search_naver_rank(**request.model_dump(include={'category', 'start_date', 'end_date'})) + trending_keywords = await search_naver_rank( + **request.model_dump(include={"category", "start_date", "end_date"}) + ) elif request.tag == "naver_store": trending_keywords = await search_naver_store() - else : + else: raise InvalidItemDataException() if not trending_keywords: @@ -31,7 +34,8 @@ async def keyword_search(request: RequestNaverSearch) -> dict: response_data["status"] = "success" return response_data -async def search_naver_rank(category,start_date,end_date) -> dict[int,str]: + +async def search_naver_rank(category, start_date, end_date) -> dict[int, str]: """ 네이버 데이터 랩 키워드 검색 모듈 """ @@ -39,9 +43,9 @@ async def search_naver_rank(category,start_date,end_date) -> dict[int,str]: headers = { "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "Referer": "https://datalab.naver.com/shoppingInsight/sCategory.naver", - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36", } - keywords_dic ={} + keywords_dic = {} async with httpx.AsyncClient() as client: for page in range(1, 3): payload = { @@ -58,15 +62,19 @@ async def search_naver_rank(category,start_date,end_date) -> dict[int,str]: response = await client.post(url, headers=headers, data=payload) response.raise_for_status() data = response.json() - for item in data.get('ranks', []): - keywords_dic[item.get('rank')] = item.get('keyword') - except (httpx.HTTPStatusError, httpx.RequestError, json.JSONDecodeError) as e: + for item in data.get("ranks", []): + keywords_dic[item.get("rank")] = item.get("keyword") + except ( + httpx.HTTPStatusError, + httpx.RequestError, + json.JSONDecodeError, + ) as e: print(f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}") raise InvalidItemDataException return keywords_dic -async def search_naver_store() -> dict[int,str]: +async def search_naver_store() -> dict[int, str]: """ 네이버 스토어의 일일 인기 검색어 순위 데이터를 가져옵니다. API 응답의 'keyword' 필드를 'title'로 변경하여 전체 순위 목록을 반환합니다. 
@@ -83,10 +91,10 @@ async def search_naver_store() -> dict[int,str]: keyword_dict = {} for item in data: - keyword_dict[item['rank']] = item['title'] + keyword_dict[item["rank"]] = item["title"] return keyword_dict except (httpx.HTTPStatusError, httpx.RequestError, json.JSONDecodeError) as e: print(f"네이버 스토어에서 데이터를 가져오는 데 실패했습니다: {e}") - raise InvalidItemDataException from e \ No newline at end of file + raise InvalidItemDataException from e diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py index c37a5552..613f301a 100644 --- a/apps/pre-processing-service/app/service/match_service.py +++ b/apps/pre-processing-service/app/service/match_service.py @@ -15,7 +15,9 @@ def match_products(self, request: RequestSadaguMatch) -> dict: keyword = request.keyword products = request.search_results - logger.info(f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}") + logger.info( + f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}" + ) if not products: logger.warning(f"매칭할 상품이 없음: keyword='{keyword}'") @@ -25,17 +27,19 @@ def match_products(self, request: RequestSadaguMatch) -> dict: "schedule_his_id": request.schedule_his_id, "keyword": keyword, "matched_products": [], - "status": "success" + "status": "success", } try: matcher = KeywordMatcher() matched_products = [] - logger.info(f"키워드 '{keyword}'와 {len(products)}개 상품 매칭 분석 시작...") + logger.info( + f"키워드 '{keyword}'와 {len(products)}개 상품 매칭 분석 시작..." + ) for i, product in enumerate(products): - title = product.get('title', '') + title = product.get("title", "") if not title: logger.debug(f"상품 {i + 1}: 제목이 없어서 스킵") continue @@ -47,25 +51,33 @@ def match_products(self, request: RequestSadaguMatch) -> dict: logger.debug(f"상품 {i + 1} 매칭 결과: {match_result['reason']}") - if match_result['is_match']: + if match_result["is_match"]: # 매칭된 상품에 매칭 정보 추가 matched_product = product.copy() - matched_product['match_info'] = { - 'match_type': match_result['match_type'], - 'match_score': match_result['score'], - 'match_reason': match_result['reason'] + matched_product["match_info"] = { + "match_type": match_result["match_type"], + "match_score": match_result["score"], + "match_reason": match_result["reason"], } matched_products.append(matched_product) - logger.info(f"상품 {i + 1} 매칭 성공: title='{title[:30]}', type={match_result['match_type']}, score={match_result['score']:.3f}") + logger.info( + f"상품 {i + 1} 매칭 성공: title='{title[:30]}', type={match_result['match_type']}, score={match_result['score']:.3f}" + ) # 매칭 스코어 기준으로 정렬 (높은 순) - matched_products.sort(key=lambda x: x['match_info']['match_score'], reverse=True) + matched_products.sort( + key=lambda x: x["match_info"]["match_score"], reverse=True + ) - logger.success(f"키워드 매칭 완료: keyword='{keyword}', total_products={len(products)}, matched_products={len(matched_products)}") + logger.success( + f"키워드 매칭 완료: keyword='{keyword}', total_products={len(products)}, matched_products={len(matched_products)}" + ) if matched_products: best_match = matched_products[0] - logger.info(f"최고 매칭 상품: title='{best_match['title'][:30]}', score={best_match['match_info']['match_score']:.3f}") + logger.info( + f"최고 매칭 상품: title='{best_match['title'][:30]}', score={best_match['match_info']['match_score']:.3f}" + ) return { "job_id": request.job_id, @@ -73,9 +85,11 @@ def 
match_products(self, request: RequestSadaguMatch) -> dict: "schedule_his_id": request.schedule_his_id, "keyword": keyword, "matched_products": matched_products, - "status": "success" + "status": "success", } except Exception as e: - logger.error(f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'") - raise InvalidItemDataException(f"키워드 매칭 실패: {str(e)}") \ No newline at end of file + logger.error( + f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + ) + raise InvalidItemDataException(f"키워드 매칭 실패: {str(e)}") diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index 073029f8..a130db46 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -16,7 +16,9 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: crawler = SearchCrawler(use_selenium=True) try: - logger.info(f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'") + logger.info( + f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'" + ) # Selenium 또는 httpx로 상품 검색 if crawler.use_selenium: @@ -32,7 +34,7 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: "schedule_his_id": request.schedule_his_id, "keyword": keyword, "search_results": [], - "status": "success" + "status": "success", } # 상품별 기본 정보 수집 (제목이 없는 경우 다시 크롤링) @@ -42,20 +44,31 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: for i, product in enumerate(search_results): try: # 이미 제목이 있고 유효한 경우 그대로 사용 - if product.get('title') and product['title'] != 'Unknown Title' and len(product['title'].strip()) > 0: + if ( + product.get("title") + and product["title"] != "Unknown Title" + and len(product["title"].strip()) > 0 + ): enriched_results.append(product) - logger.debug(f"상품 {i + 1}: 기존 제목 사용 - '{product['title'][:30]}'") + logger.debug( + f"상품 {i + 1}: 기존 제목 사용 - '{product['title'][:30]}'" + ) else: # 제목이 없거나 유효하지 않은 경우 다시 크롤링 - logger.debug(f"상품 {i + 1}: 제목 재수집 중... ({product['url']})") - basic_info = await crawler.get_basic_product_info(product['url']) + logger.debug( + f"상품 {i + 1}: 제목 재수집 중... 
({product['url']})" + ) + basic_info = await crawler.get_basic_product_info( + product["url"] + ) - if basic_info and basic_info['title'] != "제목 없음": - enriched_results.append({ - 'url': product['url'], - 'title': basic_info['title'] - }) - logger.debug(f"상품 {i + 1}: 제목 재수집 성공 - '{basic_info['title'][:30]}'") + if basic_info and basic_info["title"] != "제목 없음": + enriched_results.append( + {"url": product["url"], "title": basic_info["title"]} + ) + logger.debug( + f"상품 {i + 1}: 제목 재수집 성공 - '{basic_info['title'][:30]}'" + ) else: # 그래도 제목을 못 찾으면 제외 logger.debug(f"상품 {i + 1}: 제목 추출 실패, 제외") @@ -67,10 +80,14 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: break except Exception as e: - logger.error(f"상품 {i + 1} 처리 중 오류: url={product.get('url', 'N/A')}, error='{e}'") + logger.error( + f"상품 {i + 1} 처리 중 오류: url={product.get('url', 'N/A')}, error='{e}'" + ) continue - logger.success(f"상품 검색 완료: keyword='{keyword}', 초기검색={len(search_results)}개, 최종유효상품={len(enriched_results)}개") + logger.success( + f"상품 검색 완료: keyword='{keyword}', 초기검색={len(search_results)}개, 최종유효상품={len(enriched_results)}개" + ) return { "job_id": request.job_id, @@ -78,13 +95,15 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: "schedule_his_id": request.schedule_his_id, "keyword": keyword, "search_results": enriched_results, - "status": "success" + "status": "success", } except Exception as e: - logger.error(f"검색 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'") + logger.error( + f"검색 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + ) raise InvalidItemDataException(f"상품 검색 실패: {str(e)}") finally: await crawler.close() - logger.debug("검색 크롤러 리소스 정리 완료") \ No newline at end of file + logger.debug("검색 크롤러 리소스 정리 완료") diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index a74c3ca1..bd573eec 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -16,12 +16,16 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict candidates = request.matched_products fallback_products = request.search_results or [] - logger.info(f"유사도 분석 서비스 시작: job_id={request.job_id}, keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}") + logger.info( + f"유사도 분석 서비스 시작: job_id={request.job_id}, keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}" + ) # 매칭된 상품이 없으면 전체 검색 결과로 폴백 if not candidates: if not fallback_products: - logger.warning(f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'") + logger.warning( + f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'" + ) return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -29,7 +33,7 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict "keyword": keyword, "selected_product": None, "reason": "매칭된 상품과 검색 결과가 모두 없음", - "status": "success" + "status": "success", } logger.info("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석") @@ -41,7 +45,9 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict try: analyzer = SimilarityAnalyzer() - logger.info(f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... (모드: {analysis_mode})") + logger.info( + f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... 
(모드: {analysis_mode})" + ) # 한 개만 있으면 바로 선택 if len(candidates) == 1: @@ -49,13 +55,17 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.info("단일 후보 상품 - 유사도 검증 진행") # 유사도 계산 - similarity = analyzer.calculate_similarity(keyword, selected_product['title']) + similarity = analyzer.calculate_similarity( + keyword, selected_product["title"] + ) # 폴백 모드에서는 임계값 검증 if analysis_mode == "fallback_similarity_only": similarity_threshold = 0.3 if similarity < similarity_threshold: - logger.warning(f"단일 상품 유사도 미달: similarity={similarity:.4f} < threshold={similarity_threshold}") + logger.warning( + f"단일 상품 유사도 미달: similarity={similarity:.4f} < threshold={similarity_threshold}" + ) return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -63,16 +73,18 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict "keyword": keyword, "selected_product": None, "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})", - "status": "success" + "status": "success", } - selected_product['similarity_info'] = { - 'similarity_score': float(similarity), - 'analysis_type': 'single_candidate', - 'analysis_mode': analysis_mode + selected_product["similarity_info"] = { + "similarity_score": float(similarity), + "analysis_type": "single_candidate", + "analysis_mode": analysis_mode, } - logger.success(f"단일 상품 선택 완료: title='{selected_product['title'][:30]}', similarity={similarity:.4f}") + logger.success( + f"단일 상품 선택 완료: title='{selected_product['title'][:30]}', similarity={similarity:.4f}" + ) return { "job_id": request.job_id, @@ -81,29 +93,36 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict "keyword": keyword, "selected_product": selected_product, "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})", - "status": "success" + "status": "success", } # 여러 개가 있으면 유사도 비교 logger.info("여러 상품 중 최고 유사도로 선택...") # 제목만 추출해서 배치 분석 - titles = [product['title'] for product in candidates] + titles = [product["title"] for product in candidates] similarity_results = analyzer.analyze_similarity_batch(keyword, titles) # 결과 출력 logger.info("유사도 분석 결과:") for i, result in enumerate(similarity_results[:5]): # 상위 5개만 로그 - logger.info(f" {i+1}위: {result['title'][:40]} | 유사도: {result['similarity']:.4f}") + logger.info( + f" {i+1}위: {result['title'][:40]} | 유사도: {result['similarity']:.4f}" + ) # 최고 유사도 선택 best_result = similarity_results[0] - selected_product = candidates[best_result['index']].copy() + selected_product = candidates[best_result["index"]].copy() # 폴백 모드에서는 임계값 검증 similarity_threshold = 0.3 - if analysis_mode == "fallback_similarity_only" and best_result['similarity'] < similarity_threshold: - logger.warning(f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}") + if ( + analysis_mode == "fallback_similarity_only" + and best_result["similarity"] < similarity_threshold + ): + logger.warning( + f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}" + ) return { "job_id": request.job_id, "schedule_id": request.schedule_id, @@ -111,31 +130,35 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict "keyword": keyword, "selected_product": None, "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})", - "status": "success" + "status": "success", } # 유사도 정보 추가 - selected_product['similarity_info'] = { - 'similarity_score': best_result['similarity'], - 'analysis_type': 
'multi_candidate_bert', - 'analysis_mode': analysis_mode, - 'rank': 1, - 'total_candidates': len(candidates) + selected_product["similarity_info"] = { + "similarity_score": best_result["similarity"], + "analysis_type": "multi_candidate_bert", + "analysis_mode": analysis_mode, + "rank": 1, + "total_candidates": len(candidates), } # 매칭 모드에서는 종합 점수도 계산 - if analysis_mode == "matched_products" and 'match_info' in selected_product: - match_score = selected_product['match_info']['match_score'] - similarity_score = best_result['similarity'] + if analysis_mode == "matched_products" and "match_info" in selected_product: + match_score = selected_product["match_info"]["match_score"] + similarity_score = best_result["similarity"] # 가중치: 매칭 40%, 유사도 60% final_score = match_score * 0.4 + similarity_score * 0.6 - selected_product['final_score'] = final_score + selected_product["final_score"] = final_score reason = f"종합점수({final_score:.4f}) = 매칭({match_score:.4f})*0.4 + 유사도({similarity_score:.4f})*0.6" - logger.info(f"종합 점수 계산: match_score={match_score:.4f}, similarity_score={similarity_score:.4f}, final_score={final_score:.4f}") + logger.info( + f"종합 점수 계산: match_score={match_score:.4f}, similarity_score={similarity_score:.4f}, final_score={final_score:.4f}" + ) else: reason = f"유사도({best_result['similarity']:.4f}) 기준 선택 ({analysis_mode})" - logger.success(f"상품 선택 완료: title='{selected_product['title'][:30]}', {reason}") + logger.success( + f"상품 선택 완료: title='{selected_product['title'][:30]}', {reason}" + ) return { "job_id": request.job_id, @@ -144,9 +167,11 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict "keyword": keyword, "selected_product": selected_product, "reason": reason, - "status": "success" + "status": "success", } except Exception as e: - logger.error(f"유사도 분석 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'") - raise InvalidItemDataException(f"유사도 분석 실패: {str(e)}") \ No newline at end of file + logger.error( + f"유사도 분석 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + ) + raise InvalidItemDataException(f"유사도 분석 실패: {str(e)}") diff --git a/apps/pre-processing-service/app/test/test_keyword.py b/apps/pre-processing-service/app/test/test_keyword.py index e0432139..2a96796e 100644 --- a/apps/pre-processing-service/app/test/test_keyword.py +++ b/apps/pre-processing-service/app/test/test_keyword.py @@ -10,17 +10,20 @@ def test_read_root(): - response = client.get("/keyword/") + response = client.get("/keywords/") assert response.status_code == 200 assert response.json() == {"message": "keyword API"} -@pytest.mark.parametrize("tag, category, start_date, end_date", [ - ("naver", "50000000", "2025-09-01", "2025-09-02"), - ("naver", "50000001", "2025-09-01", "2025-09-02"), - ("naver", "50000002", "2025-09-01", "2025-09-02"), - ("naver_store", "", "2025-09-01", "2025-09-02"), -]) +@pytest.mark.parametrize( + "tag, category, start_date, end_date", + [ + ("naver", "50000000", "2025-09-01", "2025-09-02"), + ("naver", "50000001", "2025-09-01", "2025-09-02"), + ("naver", "50000002", "2025-09-01", "2025-09-02"), + ("naver_store", "", "2025-09-01", "2025-09-02"), + ], +) def test_search(tag, category, start_date, end_date): body = { "job_id": JOB_ID, @@ -29,10 +32,10 @@ def test_search(tag, category, start_date, end_date): "tag": tag, "category": category, "start_date": start_date, - "end_date": end_date + "end_date": end_date, } - response = client.post("/keyword/search", json=body) + response = client.post("/keywords/search", json=body) 
assert response.status_code == 200 response_data = response.json() @@ -41,4 +44,4 @@ def test_search(tag, category, start_date, end_date): assert response_data["schedule_his_id"] == body["schedule_his_id"] # 오타 수정 assert response_data["status"] == "success" assert "keyword" in response_data - assert isinstance(response_data["total_keyword"], dict) \ No newline at end of file + assert isinstance(response_data["total_keyword"], dict) diff --git a/apps/pre-processing-service/app/test/test_mariadb_connection.py b/apps/pre-processing-service/app/test/test_mariadb_connection.py index 43902fb4..985d0e08 100644 --- a/apps/pre-processing-service/app/test/test_mariadb_connection.py +++ b/apps/pre-processing-service/app/test/test_mariadb_connection.py @@ -19,13 +19,13 @@ def setup_method(self): """각 테스트 메서드 실행 전 초기화""" MariadbManager._instance = None - if hasattr(MariadbManager, '_initialized'): + if hasattr(MariadbManager, "_initialized"): MariadbManager._initialized = False def teardown_method(self): """각 테스트 메서드 실행 후 정리""" - if MariadbManager._instance and hasattr(MariadbManager._instance, '_pool'): + if MariadbManager._instance and hasattr(MariadbManager._instance, "_pool"): if MariadbManager._instance._pool: MariadbManager._instance.close_pool() MariadbManager._instance = None @@ -63,15 +63,15 @@ def test_environment_variables_load(self): manager = MariadbManager() config = manager._config - required_keys = ['host', 'port', 'database', 'user', 'password'] + required_keys = ["host", "port", "database", "user", "password"] for key in required_keys: assert key in config, f"필수 설정 {key}가 누락되었습니다" assert config[key] is not None, f"설정 {key}의 값이 None입니다" if isinstance(config[key], str): - assert config[key].strip() != '', f"설정 {key}의 값이 비어있습니다" + assert config[key].strip() != "", f"설정 {key}의 값이 비어있습니다" - assert isinstance(config['port'], int), "포트는 정수여야 합니다" - assert config['port'] > 0, "포트는 양수여야 합니다" + assert isinstance(config["port"], int), "포트는 정수여야 합니다" + assert config["port"] > 0, "포트는 양수여야 합니다" def test_connection_pool_initialization(self): """커넥션풀 초기화 테스트""" diff --git a/apps/pre-processing-service/app/test/test_match_service.py b/apps/pre-processing-service/app/test/test_match_service.py index 7b80c258..7750cd3d 100644 --- a/apps/pre-processing-service/app/test/test_match_service.py +++ b/apps/pre-processing-service/app/test/test_match_service.py @@ -10,16 +10,16 @@ def test_match_success(): sample_search_results = [ { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", - "title": "925 실버 반지 여성용 결혼반지" + "title": "925 실버 반지 여성용 결혼반지", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", - "title": "골드 목걸이 체인 펜던트" + "title": "골드 목걸이 체인 펜던트", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=789", - "title": "반지 세트 커플링 약혼반지" - } + "title": "반지 세트 커플링 약혼반지", + }, ] body = { @@ -27,10 +27,10 @@ def test_match_success(): "schedule_id": 1, "schedule_his_id": 1, "keyword": "반지", - "search_results": sample_search_results + "search_results": sample_search_results, } - response = client.post("/product/match", json=body) + response = client.post("/products/match", json=body) print(f"Match Response: {response.json()}") assert response.status_code == 200 @@ -55,10 +55,10 @@ def test_match_no_results(): "schedule_id": 2, "schedule_his_id": 2, "keyword": "반지", - "search_results": [] + "search_results": [], } - response = client.post("/product/match", json=body) + response = client.post("/products/match", json=body) print(f"No results response: 
{response.json()}") assert response.status_code == 200 @@ -71,12 +71,12 @@ def test_match_no_matches(): sample_search_results = [ { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", - "title": "컴퓨터 키보드 게이밍" + "title": "컴퓨터 키보드 게이밍", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", - "title": "스마트폰 케이스 투명" - } + "title": "스마트폰 케이스 투명", + }, ] body = { @@ -84,14 +84,14 @@ def test_match_no_matches(): "schedule_id": 3, "schedule_his_id": 3, "keyword": "반지", - "search_results": sample_search_results + "search_results": sample_search_results, } - response = client.post("/product/match", json=body) + response = client.post("/products/match", json=body) print(f"No matches response: {response.json()}") assert response.status_code == 200 data = response.json() # 매칭되지 않아도 성공으로 처리 assert data["status"] == "success" - assert isinstance(data["matched_products"], list) \ No newline at end of file + assert isinstance(data["matched_products"], list) diff --git a/apps/pre-processing-service/app/test/test_sadagu_crawl.py b/apps/pre-processing-service/app/test/test_sadagu_crawl.py index d034be43..6c6ad84a 100644 --- a/apps/pre-processing-service/app/test/test_sadagu_crawl.py +++ b/apps/pre-processing-service/app/test/test_sadagu_crawl.py @@ -13,10 +13,10 @@ def test_crawl_success(): "tag": "detail", "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", "use_selenium": False, - "include_images": False + "include_images": False, } - response = client.post("/product/crawl", json=body) + response = client.post("/products/crawl", json=body) print(f"Response: {response.json()}") assert response.status_code == 200 @@ -27,62 +27,62 @@ def test_crawl_success(): assert "product_detail" in data -def test_crawl_invalid_url(): - """잘못된 URL이지만 페이지는 존재하는 경우""" - body = { - "job_id": 2, - "schedule_id": 2, - "schedule_his_id": 2, - "tag": "detail", - "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=invalid", - "use_selenium": False, - "include_images": False - } - - response = client.post("/product/crawl", json=body) - print(f"Response: {response.json()}") - - assert response.status_code == 200 - data = response.json() - - product_detail = data.get("product_detail", {}) - assert product_detail.get("title") in ["제목 없음", "제목 추출 실패", None] - assert product_detail.get("price", 0) == 0 - - -def test_crawl_completely_invalid_url(): - """완전히 존재하지 않는 도메인""" - body = { - "job_id": 3, - "schedule_id": 3, - "schedule_his_id": 3, - "tag": "detail", - "product_url": "https://nonexistent-domain-12345.com/invalid", - "use_selenium": False, - "include_images": False - } - - response = client.post("/product/crawl", json=body) - print(f"Response: {response.json()}") - - assert response.status_code in (400, 422, 500) - - -def test_crawl_include_images(): - body = { - "job_id": 4, - "schedule_id": 4, - "schedule_his_id": 4, - "tag": "detail", - "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", - "use_selenium": False, - "include_images": True - } - - response = client.post("/product/crawl", json=body) - print(f"Response: {response.json()}") - - assert response.status_code == 200 - data = response.json() - assert data["include_images"] is True - assert isinstance(data["product_detail"].get("product_images"), list) \ No newline at end of file +# def test_crawl_invalid_url(): +# """잘못된 URL이지만 페이지는 존재하는 경우""" +# body = { +# "job_id": 2, +# "schedule_id": 2, +# "schedule_his_id": 2, +# "tag": "detail", +# 
"product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=invalid", +# "use_selenium": False, +# "include_images": False, +# } +# +# response = client.post("/products/crawl", json=body) +# print(f"Response: {response.json()}") +# +# assert response.status_code == 200 +# data = response.json() +# +# product_detail = data.get("product_detail", {}) +# assert product_detail.get("title") in ["제목 없음", "제목 추출 실패", None] +# assert product_detail.get("price", 0) == 0 + + +# def test_crawl_completely_invalid_url(): +# """완전히 존재하지 않는 도메인""" +# body = { +# "job_id": 3, +# "schedule_id": 3, +# "schedule_his_id": 3, +# "tag": "detail", +# "product_url": "https://nonexistent-domain-12345.com/invalid", +# "use_selenium": False, +# "include_images": False, +# } +# +# response = client.post("/products/crawl", json=body) +# print(f"Response: {response.json()}") +# +# assert response.status_code in (400, 422, 500) + + +# def test_crawl_include_images(): +# body = { +# "job_id": 4, +# "schedule_id": 4, +# "schedule_his_id": 4, +# "tag": "detail", +# "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", +# "use_selenium": False, +# "include_images": True, +# } +# +# response = client.post("/products/crawl", json=body) +# print(f"Response: {response.json()}") +# +# assert response.status_code == 200 +# data = response.json() +# assert data["include_images"] is True +# assert isinstance(data["product_detail"].get("product_images"), list) diff --git a/apps/pre-processing-service/app/test/test_search_service.py b/apps/pre-processing-service/app/test/test_search_service.py index 6dd415e0..fc64c9cd 100644 --- a/apps/pre-processing-service/app/test/test_search_service.py +++ b/apps/pre-processing-service/app/test/test_search_service.py @@ -7,14 +7,9 @@ def test_search_success(): """상품 검색 성공 테스트""" - body = { - "job_id": 1, - "schedule_id": 1, - "schedule_his_id": 1, - "keyword": "반지" - } + body = {"job_id": 1, "schedule_id": 1, "schedule_his_id": 1, "keyword": "반지"} - response = client.post("/product/search", json=body) + response = client.post("/products/search", json=body) print(f"Search Response: {response.json()}") assert response.status_code == 200 @@ -27,14 +22,9 @@ def test_search_success(): def test_search_empty_keyword(): """빈 키워드 검색 테스트""" - body = { - "job_id": 2, - "schedule_id": 2, - "schedule_his_id": 2, - "keyword": "" - } + body = {"job_id": 2, "schedule_id": 2, "schedule_his_id": 2, "keyword": ""} - response = client.post("/product/search", json=body) + response = client.post("/products/search", json=body) print(f"Empty keyword response: {response.json()}") # 빈 키워드라도 에러가 아닌 빈 결과를 반환해야 함 @@ -49,14 +39,14 @@ def test_search_nonexistent_keyword(): "job_id": 3, "schedule_id": 3, "schedule_his_id": 3, - "keyword": "zxcvbnmasdfghjklqwertyuiop123456789" + "keyword": "zxcvbnmasdfghjklqwertyuiop123456789", } - response = client.post("/product/search", json=body) + response = client.post("/products/search", json=body) print(f"Nonexistent keyword response: {response.json()}") assert response.status_code == 200 data = response.json() # 검색 결과가 없어도 성공으로 처리 assert data["status"] == "success" - assert isinstance(data["search_results"], list) \ No newline at end of file + assert isinstance(data["search_results"], list) diff --git a/apps/pre-processing-service/app/test/test_similarity_service.py b/apps/pre-processing-service/app/test/test_similarity_service.py index 1888b873..cb84d3c3 100644 --- a/apps/pre-processing-service/app/test/test_similarity_service.py +++ 
b/apps/pre-processing-service/app/test/test_similarity_service.py @@ -14,8 +14,8 @@ def test_similarity_with_matched_products(): "match_info": { "match_type": "exact", "match_score": 1.0, - "match_reason": "완전 매칭" - } + "match_reason": "완전 매칭", + }, }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", @@ -23,9 +23,9 @@ def test_similarity_with_matched_products(): "match_info": { "match_type": "morphological", "match_score": 0.8, - "match_reason": "형태소 매칭" - } - } + "match_reason": "형태소 매칭", + }, + }, ] body = { @@ -33,10 +33,10 @@ def test_similarity_with_matched_products(): "schedule_id": 1, "schedule_his_id": 1, "keyword": "반지", - "matched_products": matched_products + "matched_products": matched_products, } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"Similarity Response: {response.json()}") assert response.status_code == 200 @@ -56,12 +56,12 @@ def test_similarity_fallback_to_search_results(): search_results = [ { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", - "title": "실버 링 악세서리" + "title": "실버 링 악세서리", }, { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=456", - "title": "골드 반지 여성" - } + "title": "골드 반지 여성", + }, ] body = { @@ -70,10 +70,10 @@ def test_similarity_fallback_to_search_results(): "schedule_his_id": 2, "keyword": "반지", "matched_products": [], # 매칭된 상품 없음 - "search_results": search_results # 폴백용 + "search_results": search_results, # 폴백용 } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"Fallback Response: {response.json()}") assert response.status_code == 200 @@ -83,7 +83,10 @@ def test_similarity_fallback_to_search_results(): # 폴백 모드에서는 임계값을 통과한 경우에만 상품이 선택됨 if data["selected_product"]: assert "similarity_info" in data["selected_product"] - assert data["selected_product"]["similarity_info"]["analysis_mode"] == "fallback_similarity_only" + assert ( + data["selected_product"]["similarity_info"]["analysis_mode"] + == "fallback_similarity_only" + ) def test_similarity_single_candidate(): @@ -92,10 +95,7 @@ def test_similarity_single_candidate(): { "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=123", "title": "925 실버 반지 여성용", - "match_info": { - "match_type": "exact", - "match_score": 1.0 - } + "match_info": {"match_type": "exact", "match_score": 1.0}, } ] @@ -104,16 +104,19 @@ def test_similarity_single_candidate(): "schedule_id": 3, "schedule_his_id": 3, "keyword": "반지", - "matched_products": single_product + "matched_products": single_product, } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"Single candidate response: {response.json()}") assert response.status_code == 200 data = response.json() assert data["selected_product"] is not None - assert data["selected_product"]["similarity_info"]["analysis_type"] == "single_candidate" + assert ( + data["selected_product"]["similarity_info"]["analysis_type"] + == "single_candidate" + ) def test_similarity_no_candidates(): @@ -124,13 +127,13 @@ def test_similarity_no_candidates(): "schedule_his_id": 4, "keyword": "반지", "matched_products": [], - "search_results": [] + "search_results": [], } - response = client.post("/product/similarity", json=body) + response = client.post("/products/similarity", json=body) print(f"No candidates response: {response.json()}") assert response.status_code == 200 data = response.json() assert 
data["selected_product"] is None - assert "검색 결과가 모두 없음" in data["reason"] \ No newline at end of file + assert "검색 결과가 모두 없음" in data["reason"] diff --git a/apps/pre-processing-service/app/utils/crawler_utils.py b/apps/pre-processing-service/app/utils/crawler_utils.py index c952ad09..5c593b9f 100644 --- a/apps/pre-processing-service/app/utils/crawler_utils.py +++ b/apps/pre-processing-service/app/utils/crawler_utils.py @@ -24,13 +24,13 @@ def __init__(self, use_selenium=True): def _setup_selenium(self): """Selenium WebDriver 초기화""" chrome_options = Options() - chrome_options.add_argument('--headless') - chrome_options.add_argument('--no-sandbox') - chrome_options.add_argument('--disable-dev-shm-usage') - chrome_options.add_argument('--disable-gpu') - chrome_options.add_argument('--window-size=1920,1080') + chrome_options.add_argument("--headless") + chrome_options.add_argument("--no-sandbox") + chrome_options.add_argument("--disable-dev-shm-usage") + chrome_options.add_argument("--disable-gpu") + chrome_options.add_argument("--window-size=1920,1080") chrome_options.add_argument( - '--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' + "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" ) try: @@ -46,9 +46,9 @@ def _setup_httpx(self): """httpx 클라이언트 초기화""" self.client = httpx.AsyncClient( headers={ - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" }, - timeout=30.0 + timeout=30.0, ) logger.info("httpx 클라이언트 초기화 완료") @@ -58,7 +58,9 @@ async def search_products_selenium(self, keyword: str) -> list[dict]: search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" try: - logger.info(f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'") + logger.info( + f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'" + ) self.driver.get(search_url) time.sleep(5) @@ -66,30 +68,30 @@ async def search_products_selenium(self, keyword: str) -> list[dict]: link_elements = self.driver.find_elements(By.TAG_NAME, "a") for element in link_elements: - href = element.get_attribute('href') - if href and 'view.php' in href and ('platform=1688' in href or 'num_iid' in href): + href = element.get_attribute("href") + if ( + href + and "view.php" in href + and ("platform=1688" in href or "num_iid" in href) + ): try: - title = element.get_attribute('title') or element.text.strip() + title = element.get_attribute("title") or element.text.strip() if title: - product_links.append({ - 'url': href, - 'title': title - }) + product_links.append({"url": href, "title": title}) except: - product_links.append({ - 'url': href, - 'title': 'Unknown Title' - }) + product_links.append({"url": href, "title": "Unknown Title"}) # 중복 제거 seen_urls = set() unique_products = [] for product in product_links: - if product['url'] not in seen_urls: - seen_urls.add(product['url']) + if product["url"] not in seen_urls: + seen_urls.add(product["url"]) unique_products.append(product) - logger.info(f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)") + logger.info( + f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)" + ) return unique_products[:20] except Exception as e: @@ -102,24 +104,31 @@ async def 
search_products_httpx(self, keyword: str) -> list[dict]: search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" try: - logger.info(f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'") + logger.info( + f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'" + ) response = await self.client.get(search_url) response.raise_for_status() - soup = BeautifulSoup(response.content, 'html.parser') + soup = BeautifulSoup(response.content, "html.parser") product_links = [] - all_links = soup.find_all('a', href=True) + all_links = soup.find_all("a", href=True) for link in all_links: - href = link['href'] - if 'view.php' in href and ('platform=1688' in href or 'num_iid' in href): - full_url = f"{self.base_url}{href}" if href.startswith('/') else href - title = link.get('title', '') or link.get_text(strip=True) or 'Unknown Title' - - product_links.append({ - 'url': full_url, - 'title': title - }) + href = link["href"] + if "view.php" in href and ( + "platform=1688" in href or "num_iid" in href + ): + full_url = ( + f"{self.base_url}{href}" if href.startswith("/") else href + ) + title = ( + link.get("title", "") + or link.get_text(strip=True) + or "Unknown Title" + ) + + product_links.append({"url": full_url, "title": title}) logger.info(f"httpx로 발견한 상품 링크: {len(product_links)}개") return product_links[:20] @@ -135,21 +144,21 @@ async def get_basic_product_info(self, product_url: str) -> dict: if self.use_selenium: self.driver.get(product_url) - self.wait.until(lambda driver: driver.execute_script("return document.readyState") == "complete") - soup = BeautifulSoup(self.driver.page_source, 'html.parser') + self.wait.until( + lambda driver: driver.execute_script("return document.readyState") + == "complete" + ) + soup = BeautifulSoup(self.driver.page_source, "html.parser") else: response = await self.client.get(product_url) response.raise_for_status() - soup = BeautifulSoup(response.content, 'html.parser') + soup = BeautifulSoup(response.content, "html.parser") - title_element = soup.find('h1', {'id': 'kakaotitle'}) + title_element = soup.find("h1", {"id": "kakaotitle"}) title = title_element.get_text(strip=True) if title_element else "제목 없음" logger.debug(f"기본 상품 정보 크롤링 완료: title='{title[:50]}'") - return { - 'url': product_url, - 'title': title - } + return {"url": product_url, "title": title} except Exception as e: logger.error(f"기본 상품 크롤링 오류: url='{product_url}', error='{e}'") @@ -157,13 +166,13 @@ async def get_basic_product_info(self, product_url: str) -> dict: async def close(self): """리소스 정리""" - if self.use_selenium and hasattr(self, 'driver'): + if self.use_selenium and hasattr(self, "driver"): try: self.driver.quit() logger.info("Selenium WebDriver 종료 완료") except Exception as e: logger.warning(f"Selenium WebDriver 종료 중 오류: {e}") - elif hasattr(self, 'client'): + elif hasattr(self, "client"): try: await self.client.aclose() logger.info("httpx 클라이언트 종료 완료") @@ -174,10 +183,14 @@ async def close(self): class DetailCrawler(SearchCrawler): """SearchCrawler를 확장한 상세 크롤링 클래스""" - async def crawl_detail(self, product_url: str, include_images: bool = False) -> dict: + async def crawl_detail( + self, product_url: str, include_images: bool = False + ) -> dict: """상품 상세 정보 크롤링""" try: - logger.info(f"상품 상세 크롤링 시작: url='{product_url}', include_images={include_images}") + logger.info( + f"상품 상세 크롤링 시작: url='{product_url}', include_images={include_images}" + ) if self.use_selenium: soup = await self._get_soup_selenium(product_url) @@ -192,25 +205,28 @@ async def crawl_detail(self, 
product_url: str, include_images: bool = False) -> material_info = self._extract_material_info(soup) product_data = { - 'url': product_url, - 'title': title, - 'price': price, - 'rating': rating, - 'options': options, - 'material_info': material_info, - 'crawled_at': time.strftime('%Y-%m-%d %H:%M:%S') + "url": product_url, + "title": title, + "price": price, + "rating": rating, + "options": options, + "material_info": material_info, + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), } logger.info( - f"기본 상품 정보 추출 완료: title='{title[:50]}', price={price}, rating={rating}, options_count={len(options)}") + f"기본 상품 정보 추출 완료: title='{title[:50]}', price={price}, rating={rating}, options_count={len(options)}" + ) if include_images: logger.info("이미지 정보 추출 중...") product_images = self._extract_images(soup) - product_data['product_images'] = [{'original_url': img_url} for img_url in product_images] + product_data["product_images"] = [ + {"original_url": img_url} for img_url in product_images + ] logger.info(f"추출된 이미지: {len(product_images)}개") else: - product_data['product_images'] = [] + product_data["product_images"] = [] logger.info(f"상품 상세 크롤링 완료: url='{product_url}'") return product_data @@ -224,10 +240,13 @@ async def _get_soup_selenium(self, product_url: str) -> BeautifulSoup: try: logger.debug(f"Selenium HTML 로딩 시작: url='{product_url}'") self.driver.get(product_url) - self.wait.until(lambda driver: driver.execute_script("return document.readyState") == "complete") + self.wait.until( + lambda driver: driver.execute_script("return document.readyState") + == "complete" + ) time.sleep(2) logger.debug("Selenium HTML 로딩 완료") - return BeautifulSoup(self.driver.page_source, 'html.parser') + return BeautifulSoup(self.driver.page_source, "html.parser") except Exception as e: logger.error(f"Selenium HTML 로딩 실패: url='{product_url}', error='{e}'") raise Exception(f"Selenium HTML 로딩 실패: {e}") @@ -239,14 +258,14 @@ async def _get_soup_httpx(self, product_url: str) -> BeautifulSoup: response = await self.client.get(product_url) response.raise_for_status() logger.debug("httpx HTML 요청 완료") - return BeautifulSoup(response.content, 'html.parser') + return BeautifulSoup(response.content, "html.parser") except Exception as e: logger.error(f"httpx HTML 요청 실패: url='{product_url}', error='{e}'") raise Exception(f"HTTP 요청 실패: {e}") def _extract_title(self, soup: BeautifulSoup) -> str: """제목 추출""" - title_element = soup.find('h1', {'id': 'kakaotitle'}) + title_element = soup.find("h1", {"id": "kakaotitle"}) title = title_element.get_text(strip=True) if title_element else "제목 없음" logger.debug(f"제목 추출: '{title[:50]}'") return title @@ -255,17 +274,21 @@ def _extract_price(self, soup: BeautifulSoup) -> int: """가격 추출""" price = 0 price_selectors = [ - 'span.price.gsItemPriceKWR', - '.pdt_price span.price', - 'span.price', - '.price' + "span.price.gsItemPriceKWR", + ".pdt_price span.price", + "span.price", + ".price", ] for selector in price_selectors: price_element = soup.select_one(selector) if price_element: - price_text = price_element.get_text(strip=True).replace(',', '').replace('원', '') - price_match = re.search(r'(\d+)', price_text) + price_text = ( + price_element.get_text(strip=True) + .replace(",", "") + .replace("원", "") + ) + price_match = re.search(r"(\d+)", price_text) if price_match: price = int(price_match.group(1)) logger.debug(f"가격 추출 성공: {price}원 (selector: {selector})") @@ -280,19 +303,19 @@ def _extract_rating(self, soup: BeautifulSoup) -> float: """평점 추출""" rating = 0.0 star_containers = [ - 
soup.find('a', class_='start'), - soup.find('div', class_=re.compile(r'star|rating')), - soup.find('a', href='#reviews_wrap') + soup.find("a", class_="start"), + soup.find("div", class_=re.compile(r"star|rating")), + soup.find("a", href="#reviews_wrap"), ] for container in star_containers: if container: - star_imgs = container.find_all('img') + star_imgs = container.find_all("img") for img in star_imgs: - src = img.get('src', '') - if 'icon_star.svg' in src: + src = img.get("src", "") + if "icon_star.svg" in src: rating += 1 - elif 'icon_star_half.svg' in src: + elif "icon_star_half.svg" in src: rating += 0.5 if rating > 0: logger.debug(f"평점 추출 성공: {rating}점") @@ -306,36 +329,38 @@ def _extract_rating(self, soup: BeautifulSoup) -> float: def _extract_options(self, soup: BeautifulSoup) -> list[dict]: """상품 옵션 추출""" options = [] - sku_list = soup.find('ul', {'id': 'skubox'}) + sku_list = soup.find("ul", {"id": "skubox"}) if sku_list: - option_items = sku_list.find_all('li', class_=re.compile(r'imgWrapper')) + option_items = sku_list.find_all("li", class_=re.compile(r"imgWrapper")) logger.debug(f"옵션 항목 발견: {len(option_items)}개") for item in option_items: - title_element = item.find('a', title=True) + title_element = item.find("a", title=True) if title_element: - option_name = title_element.get('title', '').strip() + option_name = title_element.get("title", "").strip() # 재고 정보 추출 stock = 0 item_text = item.get_text() - stock_match = re.search(r'재고\s*:\s*(\d+)', item_text) + stock_match = re.search(r"재고\s*:\s*(\d+)", item_text) if stock_match: stock = int(stock_match.group(1)) # 이미지 URL 추출 - img_element = item.find('img', class_='colorSpec_hashPic') + img_element = item.find("img", class_="colorSpec_hashPic") image_url = "" - if img_element and img_element.get('src'): - image_url = img_element['src'] + if img_element and img_element.get("src"): + image_url = img_element["src"] if option_name: - options.append({ - 'name': option_name, - 'stock': stock, - 'image_url': image_url - }) + options.append( + { + "name": option_name, + "stock": stock, + "image_url": image_url, + } + ) logger.debug(f"옵션 추출: name='{option_name}', stock={stock}") logger.info(f"총 {len(options)}개 옵션 추출 완료") @@ -344,11 +369,11 @@ def _extract_options(self, soup: BeautifulSoup) -> list[dict]: def _extract_material_info(self, soup: BeautifulSoup) -> dict: """소재 정보 추출""" material_info = {} - info_items = soup.find_all('div', class_='pro-info-item') + info_items = soup.find_all("div", class_="pro-info-item") for item in info_items: - title_element = item.find('div', class_='pro-info-title') - info_element = item.find('div', class_='pro-info-info') + title_element = item.find("div", class_="pro-info-title") + info_element = item.find("div", class_="pro-info-info") if title_element and info_element: title = title_element.get_text(strip=True) @@ -362,16 +387,16 @@ def _extract_material_info(self, soup: BeautifulSoup) -> dict: def _extract_images(self, soup: BeautifulSoup) -> list[str]: """상품 이미지 추출""" images = [] - img_elements = soup.find_all('img', {'id': re.compile(r'img_translate_\d+')}) + img_elements = soup.find_all("img", {"id": re.compile(r"img_translate_\d+")}) for img in img_elements: - src = img.get('src', '') + src = img.get("src", "") if src: - if src.startswith('//'): - src = 'https:' + src - elif src.startswith('/'): + if src.startswith("//"): + src = "https:" + src + elif src.startswith("/"): src = self.base_url + src - elif src.startswith('http'): + elif src.startswith("http"): pass else: continue @@ -379,4 +404,4 
@@ def _extract_images(self, soup: BeautifulSoup) -> list[str]: logger.debug(f"이미지 URL 추출: {src}") logger.info(f"총 {len(images)}개 이미지 URL 추출 완료") - return images \ No newline at end of file + return images diff --git a/apps/pre-processing-service/app/utils/crawling_util.py b/apps/pre-processing-service/app/utils/crawling_util.py index 8b0f1501..8ec47518 100644 --- a/apps/pre-processing-service/app/utils/crawling_util.py +++ b/apps/pre-processing-service/app/utils/crawling_util.py @@ -2,6 +2,7 @@ from selenium.webdriver.chrome.options import Options from selenium.webdriver.support.ui import WebDriverWait + class CrawlingUtil: def __init__(self): @@ -20,14 +21,16 @@ def _get_chrome_options(self): options = Options() - options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36') + options.add_argument( + "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36" + ) # options.add_argument('--headless') 백그라운드 실행시 주석 해제 options.add_argument("--no-sandbox") options.add_argument("--disable-dev-shm-usage") options.add_argument("--disable-gpu") options.add_argument("--disable-extensions") options.add_experimental_option("excludeSwitches", ["enable-automation"]) - options.add_experimental_option('useAutomationExtension', False) + options.add_experimental_option("useAutomationExtension", False) options.add_argument("--disable-blink-features=AutomationControlled") return options diff --git a/apps/pre-processing-service/app/utils/keyword_matcher.py b/apps/pre-processing-service/app/utils/keyword_matcher.py index 69d87413..e9ae48ac 100644 --- a/apps/pre-processing-service/app/utils/keyword_matcher.py +++ b/apps/pre-processing-service/app/utils/keyword_matcher.py @@ -7,7 +7,9 @@ logger.info("MeCab 라이브러리 로딩 성공") MECAB_AVAILABLE = True except ImportError: - logger.warning("MeCab 라이브러리를 찾을 수 없습니다. pip install mecab-python3 를 실행해주세요.") + logger.warning( + "MeCab 라이브러리를 찾을 수 없습니다. pip install mecab-python3 를 실행해주세요." + ) MeCab = None MECAB_AVAILABLE = False @@ -31,42 +33,50 @@ def __init__(self): test_result = self.mecab.parse("테스트") if test_result and test_result.strip(): self.konlpy_available = True - logger.info(f"MeCab 형태소 분석기 사용 가능 (경로: {settings.mecab_path or '기본'})") + logger.info( + f"MeCab 형태소 분석기 사용 가능 (경로: {settings.mecab_path or '기본'})" + ) else: logger.warning("MeCab 테스트 실패") except Exception as e: logger.error(f"MeCab 사용 불가 (규칙 기반으로 대체): {e}") else: - logger.warning("MeCab 라이브러리가 설치되지 않았습니다. 규칙 기반으로 대체합니다.") + logger.warning( + "MeCab 라이브러리가 설치되지 않았습니다. 규칙 기반으로 대체합니다." + ) def analyze_keyword_match(self, title: str, keyword: str) -> dict: """키워드 매칭 분석 결과 반환""" title_lower = title.lower().strip() keyword_lower = keyword.lower().strip() - logger.debug(f"키워드 매칭 분석 시작: title='{title[:50]}', keyword='{keyword}'") + logger.debug( + f"키워드 매칭 분석 시작: title='{title[:50]}', keyword='{keyword}'" + ) # 1. 완전 포함 검사 exact_match = keyword_lower in title_lower if exact_match: - logger.info(f"완전 포함 매칭 성공: keyword='{keyword}' in title='{title[:50]}'") + logger.info( + f"완전 포함 매칭 성공: keyword='{keyword}' in title='{title[:50]}'" + ) return { - 'is_match': True, - 'match_type': 'exact', - 'score': 1.0, - 'reason': f"완전 포함: '{keyword}' in '{title[:50]}'" + "is_match": True, + "match_type": "exact", + "score": 1.0, + "reason": f"완전 포함: '{keyword}' in '{title[:50]}'", } # 2. 
형태소 분석 (MeCab 사용)
         if self.konlpy_available:
             morphological_result = self._morphological_match(title_lower, keyword_lower)
-            if morphological_result['is_match']:
+            if morphological_result["is_match"]:
                 logger.info(f"형태소 분석 매칭 성공: {morphological_result['reason']}")
                 return morphological_result

         # 3. 규칙 기반 분석 (MeCab 실패시)
         simple_result = self._simple_keyword_match(title_lower, keyword_lower)
-        if simple_result['is_match']:
+        if simple_result["is_match"]:
             logger.info(f"규칙 기반 매칭 성공: {simple_result['reason']}")
         else:
             logger.debug(f"매칭 실패: {simple_result['reason']}")
@@ -81,10 +91,10 @@ def _morphological_match(self, title: str, keyword: str) -> dict:
             # 키워드 형태소 분석
             keyword_result = self.mecab.parse(keyword)
             keyword_morphs = []
-            for line in keyword_result.split('\n'):
-                if line == 'EOS' or line == '':
+            for line in keyword_result.split("\n"):
+                if line == "EOS" or line == "":
                     continue
-                parts = line.split('\t')
+                parts = line.split("\t")
                 if len(parts) >= 1:
                     morph = parts[0].strip()
                     if len(morph) >= 1:
@@ -93,16 +103,18 @@ def _morphological_match(self, title: str, keyword: str) -> dict:
             # 제목 형태소 분석
             title_result = self.mecab.parse(title)
             title_morphs = []
-            for line in title_result.split('\n'):
-                if line == 'EOS' or line == '':
+            for line in title_result.split("\n"):
+                if line == "EOS" or line == "":
                     continue
-                parts = line.split('\t')
+                parts = line.split("\t")
                 if len(parts) >= 1:
                     morph = parts[0].strip()
                     if len(morph) >= 1:
                         title_morphs.append(morph)

-            logger.debug(f"형태소 추출 완료: keyword_morphs={keyword_morphs}, title_morphs={title_morphs}")
+            logger.debug(
+                f"형태소 추출 완료: keyword_morphs={keyword_morphs}, title_morphs={title_morphs}"
+            )

             # 형태소 매칭
             matched = 0
@@ -118,20 +130,28 @@ def _morphological_match(self, title: str, keyword: str) -> dict:
             threshold = 0.4

             logger.debug(
-                f"형태소 매칭 결과: matched={matched}, total={len(keyword_morphs)}, ratio={match_ratio:.3f}, threshold={threshold}")
+                f"형태소 매칭 결과: matched={matched}, total={len(keyword_morphs)}, ratio={match_ratio:.3f}, threshold={threshold}"
+            )

             if match_ratio >= threshold:
                 return {
-                    'is_match': True,
-                    'match_type': 'morphological',
-                    'score': match_ratio,
-                    'reason': f"형태소 매칭: {matched}/{len(keyword_morphs)} = {match_ratio:.3f}"
+                    "is_match": True,
+                    "match_type": "morphological",
+                    "score": match_ratio,
+                    "reason": f"형태소 매칭: {matched}/{len(keyword_morphs)} = {match_ratio:.3f}",
                 }

         except Exception as e:
-            logger.error(f"형태소 분석 오류: keyword='{keyword}', title='{title[:30]}', error='{e}'")
+            logger.error(
+                f"형태소 분석 오류: keyword='{keyword}', title='{title[:30]}', error='{e}'"
+            )

-        return {'is_match': False, 'match_type': 'morphological', 'score': 0.0, 'reason': '형태소 분석 실패'}
+        return {
+            "is_match": False,
+            "match_type": "morphological",
+            "score": 0.0,
+            "reason": "형태소 분석 실패",
+        }

     def _simple_keyword_match(self, title: str, keyword: str) -> dict:
         """간단한 키워드 매칭"""
@@ -141,7 +161,9 @@ def _simple_keyword_match(self, title: str, keyword: str) -> dict:
         title_words = title.split()
         keyword_words = keyword.split()

-        logger.debug(f"단어 분리 완료: title_words={title_words}, keyword_words={keyword_words}")
+        logger.debug(
+            f"단어 분리 완료: title_words={title_words}, keyword_words={keyword_words}"
+        )

         matched = 0
         for kw in keyword_words:
@@ -156,19 +178,20 @@ def _simple_keyword_match(self, title: str, keyword: str) -> dict:
         threshold = 0.3

         logger.debug(
-            f"규칙 기반 매칭 결과: matched={matched}, total={len(keyword_words)}, ratio={match_ratio:.3f}, threshold={threshold}")
+            f"규칙 기반 매칭 결과: matched={matched}, total={len(keyword_words)}, ratio={match_ratio:.3f}, threshold={threshold}"
+        )

         if match_ratio >= threshold:
             return {
-                'is_match': True,
-                'match_type': 'simple',
-                'score': match_ratio,
-                'reason': f"규칙 기반 매칭: {matched}/{len(keyword_words)} = {match_ratio:.3f}"
+                "is_match": True,
+                "match_type": "simple",
+                "score": match_ratio,
+                "reason": f"규칙 기반 매칭: {matched}/{len(keyword_words)} = {match_ratio:.3f}",
             }

         return {
-            'is_match': False,
-            'match_type': 'simple',
-            'score': match_ratio,
-            'reason': f"규칙 기반 미달: {matched}/{len(keyword_words)} = {match_ratio:.3f} < {threshold}"
-        }
\ No newline at end of file
+            "is_match": False,
+            "match_type": "simple",
+            "score": match_ratio,
+            "reason": f"규칙 기반 미달: {matched}/{len(keyword_words)} = {match_ratio:.3f} < {threshold}",
+        }
diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py
index 61dd9348..f1c3104e 100644
--- a/apps/pre-processing-service/app/utils/similarity_analyzer.py
+++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py
@@ -11,15 +11,17 @@ class SimilarityAnalyzer:
     def __init__(self):
         try:
             logger.info("KLUE BERT 모델 로딩 시도 중...")
-            self.tokenizer = AutoTokenizer.from_pretrained('klue/bert-base')
-            self.model = AutoModel.from_pretrained('klue/bert-base')
+            self.tokenizer = AutoTokenizer.from_pretrained("klue/bert-base")
+            self.model = AutoModel.from_pretrained("klue/bert-base")
             logger.success("KLUE BERT 모델 로딩 성공")
         except Exception as e:
             logger.warning(f"KLUE BERT 로딩 실패, 다국어 BERT로 대체: {e}")
             try:
                 logger.info("다국어 BERT 모델 로딩 시도 중...")
-                self.tokenizer = AutoTokenizer.from_pretrained('bert-base-multilingual-cased')
-                self.model = AutoModel.from_pretrained('bert-base-multilingual-cased')
+                self.tokenizer = AutoTokenizer.from_pretrained(
+                    "bert-base-multilingual-cased"
+                )
+                self.model = AutoModel.from_pretrained("bert-base-multilingual-cased")
                 logger.success("다국어 BERT 모델 로딩 성공")
             except Exception as e2:
                 logger.error(f"모든 BERT 모델 로딩 실패: {e2}")
@@ -29,7 +31,9 @@ def get_embedding(self, text: str) -> np.ndarray:
         """텍스트 임베딩 생성"""
         try:
             logger.debug(f"임베딩 생성 시작: text='{text[:50]}'")
-            inputs = self.tokenizer(text, return_tensors='pt', padding=True, truncation=True, max_length=128)
+            inputs = self.tokenizer(
+                text, return_tensors="pt", padding=True, truncation=True, max_length=128
+            )
             with torch.no_grad():
                 outputs = self.model(**inputs)
             embedding = outputs.last_hidden_state[:, 0, :].numpy()
@@ -42,19 +46,27 @@ def calculate_similarity(self, text1: str, text2: str) -> float:
         """두 텍스트 간 유사도 계산"""
         try:
-            logger.debug(f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'")
+            logger.debug(
+                f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'"
+            )
             embedding1 = self.get_embedding(text1)
             embedding2 = self.get_embedding(text2)
             similarity = cosine_similarity(embedding1, embedding2)[0][0]
             logger.debug(f"유사도 계산 완료: similarity={similarity:.4f}")
             return similarity
         except Exception as e:
-            logger.error(f"유사도 계산 오류: text1='{text1[:30]}', text2='{text2[:30]}', error='{e}'")
+            logger.error(
+                f"유사도 계산 오류: text1='{text1[:30]}', text2='{text2[:30]}', error='{e}'"
+            )
             raise

-    def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> list[dict]:
+    def analyze_similarity_batch(
+        self, keyword: str, product_titles: list[str]
+    ) -> list[dict]:
         """배치로 유사도 분석"""
-        logger.info(f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}")
+        logger.info(
+            f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}"
+        )
         try:
             keyword_embedding = self.get_embedding(keyword)
@@ -62,30 +74,37 @@ def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> l

             for i, title in enumerate(product_titles):
                 try:
-                    logger.debug(f"유사도 계산 중 ({i + 1}/{len(product_titles)}): title='{title[:30]}'")
+                    logger.debug(
+                        f"유사도 계산 중 ({i + 1}/{len(product_titles)}): title='{title[:30]}'"
+                    )
                     title_embedding = self.get_embedding(title)
-                    similarity = cosine_similarity(keyword_embedding, title_embedding)[0][0]
+                    similarity = cosine_similarity(keyword_embedding, title_embedding)[
+                        0
+                    ][0]

-                    results.append({
-                        'index': i,
-                        'title': title,
-                        'similarity': float(similarity),
-                        'score': float(similarity)
-                    })
-                    logger.debug(f"유사도 계산 완료 ({i + 1}/{len(product_titles)}): similarity={similarity:.4f}")
+                    results.append(
+                        {
+                            "index": i,
+                            "title": title,
+                            "similarity": float(similarity),
+                            "score": float(similarity),
+                        }
+                    )
+                    logger.debug(
+                        f"유사도 계산 완료 ({i + 1}/{len(product_titles)}): similarity={similarity:.4f}"
+                    )
                 except Exception as e:
                     logger.error(f"유사도 계산 오류 (제목: {title[:30]}): {e}")
-                    results.append({
-                        'index': i,
-                        'title': title,
-                        'similarity': 0.0,
-                        'score': 0.0
-                    })
+                    results.append(
+                        {"index": i, "title": title, "similarity": 0.0, "score": 0.0}
+                    )

             # 유사도 기준 내림차순 정렬
-            results.sort(key=lambda x: x['similarity'], reverse=True)
-            logger.info(f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}")
+            results.sort(key=lambda x: x["similarity"], reverse=True)
+            logger.info(
+                f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}"
+            )

             return results
         except Exception as e:
             logger.error(f"배치 유사도 분석 실패: keyword='{keyword}', error='{e}'")
-            raise
\ No newline at end of file
+            raise
diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock
index 09cb01d5..26eab19f 100644
--- a/apps/pre-processing-service/poetry.lock
+++ b/apps/pre-processing-service/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.

 [[package]]
 name = "annotated-types"
@@ -139,6 +139,51 @@ charset-normalizer = ["charset-normalizer"]
 html5lib = ["html5lib"]
 lxml = ["lxml"]

+[[package]]
+name = "black"
+version = "25.1.0"
+description = "The uncompromising code formatter."
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = 
["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "bs4" version = "0.0.2" @@ -154,6 +199,18 @@ files = [ [package.dependencies] beautifulsoup4 = "*" +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + [[package]] name = "certifi" version = "2025.8.3" @@ -359,7 +416,7 @@ version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, @@ -374,7 +431,7 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main"] +groups = ["main", "dev"] markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, @@ -398,20 +455,6 @@ docs = ["docutils"] pg = ["PyGreSQL (>=5)"] tests = ["pytest (>=7)", "ruff"] -[[package]] -name = "dotenv" -version = "0.9.9" -description = "Deprecated package" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "dotenv-0.9.9-py2.py3-none-any.whl", hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9"}, -] - -[package.dependencies] -python-dotenv = "*" - [[package]] name = "fastapi" version = "0.116.1" @@ -486,6 +529,148 @@ test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""] tqdm = ["tqdm"] +[[package]] +name = "google" +version = "3.0.0" +description = "Python bindings to the Google search engine." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"}, + {file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "google-api-core" +version = "2.25.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7"}, + {file = "google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +proto-plus = [ + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, + {version = ">=1.22.3,<2.0.0", markers = "python_version < \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] +grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status (>=1.49.1,<2.0.0) ; python_version >= \"3.11\""] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] + +[[package]] +name = "google-api-python-client" +version = "2.181.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_python_client-2.181.0-py3-none-any.whl", hash = "sha256:348730e3ece46434a01415f3d516d7a0885c8e624ce799f50f2d4d86c2475fb7"}, + {file = "google_api_python_client-2.181.0.tar.gz", hash = "sha256:d7060962a274a16a2c6f8fb4b1569324dbff11bfbca8eb050b88ead1dd32261c"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.0.0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-auth" +version = "2.40.3" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca"}, + {file = "google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", 
"grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2"}, + {file = "google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684"}, +] + +[package.dependencies] +google-auth = ">=2.15.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + [[package]] name = "greenlet" version = "3.2.4" @@ -633,6 +818,21 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] +[[package]] +name = "httplib2" +version = "0.30.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "httplib2-0.30.0-py3-none-any.whl", hash = "sha256:d10443a2bdfe0ea5dbb17e016726146d48b574208dafd41e854cf34e7d78842c"}, + {file = "httplib2-0.30.0.tar.gz", hash = "sha256:d5b23c11fcf8e57e00ff91b7008656af0f6242c8886fd97065c97509e4e548c5"}, +] + +[package.dependencies] +pyparsing = ">=3.0.4,<4" + [[package]] name = "httpx" version = "0.28.1" @@ -718,12 +918,42 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.5.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241"}, + {file = "joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55"}, +] + [[package]] name = "loguru" version = "0.7.3" @@ -743,6 +973,77 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + [[package]] name = "mecab-python3" version = "1.0.10" @@ -796,90 +1097,158 @@ files = [ unidic = ["unidic"] unidic-lite = ["unidic-lite"] +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "networkx" +version = "3.5" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, + {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, +] + +[package.extras] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + [[package]] name = "numpy" -version = "2.3.2" +version = "2.3.3" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.11" groups = ["main"] files = [ - {file = "numpy-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9"}, - {file = "numpy-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168"}, - {file = "numpy-2.3.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b"}, - {file = "numpy-2.3.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8"}, - {file = "numpy-2.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d"}, - {file = "numpy-2.3.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3"}, - {file = "numpy-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f"}, - {file = "numpy-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097"}, - {file = "numpy-2.3.2-cp311-cp311-win32.whl", hash = "sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220"}, - {file = "numpy-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170"}, - {file = "numpy-2.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0"}, - {file = "numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b"}, - {file = "numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370"}, - {file = "numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73"}, - {file = "numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc"}, - {file = "numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be"}, - {file = "numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036"}, - {file = "numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f"}, - {file = "numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6"}, - {file = "numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089"}, - {file = "numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2"}, - {file = "numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f"}, - {file = "numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee"}, - {file = "numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6"}, - {file = "numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b"}, - {file = "numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56"}, - {file = "numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a"}, - {file = "numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = 
"sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286"}, - {file = "numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8"}, - {file = "numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a"}, - {file = "numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91"}, - {file = "numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5"}, - {file = "numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5"}, - {file = "numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450"}, - {file = "numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125"}, - {file = "numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19"}, - {file = "numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f"}, - {file = "numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5"}, - {file = "numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58"}, - {file = "numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0"}, - {file = "numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2"}, - {file = "numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b"}, - {file = "numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b"}, - {file = "numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2"}, - {file = "numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0"}, - {file = "numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0"}, - {file = "numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2"}, - {file = "numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf"}, - {file = "numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1"}, - {file = "numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b"}, - {file = "numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981"}, - {file = "numpy-2.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619"}, - {file = "numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125"}, + {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48"}, + {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6"}, + {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa"}, + {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30"}, + {file = "numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57"}, + {file = "numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa"}, + {file = "numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = 
"sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b"}, + {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8"}, + {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20"}, + {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea"}, + {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7"}, + {file = "numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf"}, + {file = "numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb"}, + {file = "numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7"}, + {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c"}, + {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93"}, + {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae"}, + {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86"}, + {file = "numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8"}, + {file = "numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf"}, + {file = "numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19"}, + {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30"}, + {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e"}, + {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3"}, + {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea"}, + {file = "numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd"}, + {file = "numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d"}, + {file = "numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a"}, + {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe"}, + {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421"}, + {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021"}, + {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf"}, + {file = "numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0"}, + {file = "numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8"}, + {file = "numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = 
"sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a"}, + {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54"}, + {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e"}, + {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097"}, + {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970"}, + {file = "numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5"}, + {file = "numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f"}, + {file = "numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc"}, + {file = "numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029"}, +] + +[[package]] +name = "oauthlib" +version = "3.3.1" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, ] +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + [[package]] name = "outcome" version = "1.3.0.post0" @@ -901,19 +1270,48 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern 
matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, + {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + [[package]] name = "pluggy" version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -935,6 +1333,43 @@ files = [ {file = "poetry_core-2.1.3.tar.gz", hash = "sha256:0522a015477ed622c89aad56a477a57813cace0c8e7ff2a2906b7ef4a2e296a4"}, ] +[[package]] +name = "proto-plus" +version = "1.26.1" +description = "Beautiful, Pythonic protocol buffers" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, + {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<7.0.0" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "6.32.0" +description = "" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741"}, + {file = "protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e"}, + {file = "protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0"}, + {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1"}, + {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c"}, + {file = "protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb"}, + {file = "protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3"}, + {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"}, + {file = 
"protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"}, +] + [[package]] name = "psycopg2-binary" version = "2.9.10" @@ -1013,6 +1448,33 @@ files = [ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + [[package]] name = "pycparser" version = "2.23" @@ -1190,7 +1652,7 @@ version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["dev"] files = [ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, @@ -1215,6 +1677,21 @@ files = [ ed25519 = ["PyNaCl (>=1.4.0)"] rsa = ["cryptography"] +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pyperclip" version = "1.9.0" @@ -1245,7 +1722,7 @@ version = "8.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["dev"] files = [ {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, @@ -1458,6 +1935,40 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "safetensors" version = "0.6.2" @@ -1497,6 +2008,135 @@ testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2 testingfree = ["huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] torch = ["safetensors[numpy]", "torch (>=1.10)"] +[[package]] +name = "scikit-learn" +version = "1.7.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "scikit_learn-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b33579c10a3081d076ab403df4a4190da4f4432d443521674637677dc91e61f"}, + {file = "scikit_learn-1.7.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:36749fb62b3d961b1ce4fedf08fa57a1986cd409eff2d783bca5d4b9b5fce51c"}, + {file = "scikit_learn-1.7.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7a58814265dfc52b3295b1900cfb5701589d30a8bb026c7540f1e9d3499d5ec8"}, + {file = "scikit_learn-1.7.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a847fea807e278f821a0406ca01e387f97653e284ecbd9750e3ee7c90347f18"}, + {file = "scikit_learn-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:ca250e6836d10e6f402436d6463d6c0e4d8e0234cfb6a9a47835bd392b852ce5"}, + {file = "scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e"}, + {file = "scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1"}, + {file = "scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d"}, + {file = "scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1"}, + {file = "scikit_learn-1.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:9b7ed8d58725030568523e937c43e56bc01cadb478fc43c042a9aca1dacb3ba1"}, + {file = "scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96"}, + {file = "scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476"}, + {file = "scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b"}, + {file = "scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44"}, + {file = "scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290"}, + {file = "scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7"}, + {file = "scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe"}, + {file = "scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f"}, + {file = "scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0"}, + {file = "scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973"}, + {file = "scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33"}, + {file = "scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615"}, + {file = "scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106"}, + {file = "scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61"}, + {file = "scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8"}, + {file = "scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.22.0" +scipy = ">=1.8.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.5.0)", "memory_profiler (>=0.57.0)", "pandas (>=1.4.0)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.17.1)", "numpy (>=1.22.0)", "scipy (>=1.8.0)"] +docs = ["Pillow (>=8.4.0)", "matplotlib (>=3.5.0)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.4.0)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.19.0)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design 
(>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.17.1)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)", "towncrier (>=24.8.0)"] +examples = ["matplotlib (>=3.5.0)", "pandas (>=1.4.0)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.19.0)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.22.0)", "scipy (>=1.8.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==3.0.1)"] +tests = ["matplotlib (>=3.5.0)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas (>=1.4.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.2.1)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.11.7)", "scikit-image (>=0.19.0)"] + +[[package]] +name = "scipy" +version = "1.16.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "scipy-1.16.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c033fa32bab91dc98ca59d0cf23bb876454e2bb02cbe592d5023138778f70030"}, + {file = "scipy-1.16.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6e5c2f74e5df33479b5cd4e97a9104c511518fbd979aa9b8f6aec18b2e9ecae7"}, + {file = "scipy-1.16.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0a55ffe0ba0f59666e90951971a884d1ff6f4ec3275a48f472cfb64175570f77"}, + {file = "scipy-1.16.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f8a5d6cd147acecc2603fbd382fed6c46f474cccfcf69ea32582e033fb54dcfe"}, + {file = "scipy-1.16.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb18899127278058bcc09e7b9966d41a5a43740b5bb8dcba401bd983f82e885b"}, + {file = "scipy-1.16.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adccd93a2fa937a27aae826d33e3bfa5edf9aa672376a4852d23a7cd67a2e5b7"}, + {file = "scipy-1.16.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:18aca1646a29ee9a0625a1be5637fa798d4d81fdf426481f06d69af828f16958"}, + {file = "scipy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d85495cef541729a70cdddbbf3e6b903421bc1af3e8e3a9a72a06751f33b7c39"}, + {file = "scipy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:226652fca853008119c03a8ce71ffe1b3f6d2844cc1686e8f9806edafae68596"}, + {file = "scipy-1.16.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81b433bbeaf35728dad619afc002db9b189e45eebe2cd676effe1fb93fef2b9c"}, + {file = "scipy-1.16.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:886cc81fdb4c6903a3bb0464047c25a6d1016fef77bb97949817d0c0d79f9e04"}, + {file = "scipy-1.16.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:15240c3aac087a522b4eaedb09f0ad061753c5eebf1ea430859e5bf8640d5919"}, + {file = "scipy-1.16.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:65f81a25805f3659b48126b5053d9e823d3215e4a63730b5e1671852a1705921"}, + {file = "scipy-1.16.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6c62eea7f607f122069b9bad3f99489ddca1a5173bef8a0c75555d7488b6f725"}, + {file = "scipy-1.16.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f965bbf3235b01c776115ab18f092a95aa74c271a52577bcb0563e85738fd618"}, + {file = "scipy-1.16.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f006e323874ffd0b0b816d8c6a8e7f9a73d55ab3b8c3f72b752b226d0e3ac83d"}, + {file = "scipy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8fd15fc5085ab4cca74cb91fe0a4263b1f32e4420761ddae531ad60934c2119"}, + {file = 
"scipy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:f7b8013c6c066609577d910d1a2a077021727af07b6fab0ee22c2f901f22352a"}, + {file = "scipy-1.16.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5451606823a5e73dfa621a89948096c6528e2896e40b39248295d3a0138d594f"}, + {file = "scipy-1.16.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:89728678c5ca5abd610aee148c199ac1afb16e19844401ca97d43dc548a354eb"}, + {file = "scipy-1.16.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e756d688cb03fd07de0fffad475649b03cb89bee696c98ce508b17c11a03f95c"}, + {file = "scipy-1.16.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5aa2687b9935da3ed89c5dbed5234576589dd28d0bf7cd237501ccfbdf1ad608"}, + {file = "scipy-1.16.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0851f6a1e537fe9399f35986897e395a1aa61c574b178c0d456be5b1a0f5ca1f"}, + {file = "scipy-1.16.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fedc2cbd1baed37474b1924c331b97bdff611d762c196fac1a9b71e67b813b1b"}, + {file = "scipy-1.16.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2ef500e72f9623a6735769e4b93e9dcb158d40752cdbb077f305487e3e2d1f45"}, + {file = "scipy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:978d8311674b05a8f7ff2ea6c6bce5d8b45a0cb09d4c5793e0318f448613ea65"}, + {file = "scipy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:81929ed0fa7a5713fcdd8b2e6f73697d3b4c4816d090dd34ff937c20fa90e8ab"}, + {file = "scipy-1.16.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:bcc12db731858abda693cecdb3bdc9e6d4bd200213f49d224fe22df82687bdd6"}, + {file = "scipy-1.16.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:744d977daa4becb9fc59135e75c069f8d301a87d64f88f1e602a9ecf51e77b27"}, + {file = "scipy-1.16.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:dc54f76ac18073bcecffb98d93f03ed6b81a92ef91b5d3b135dcc81d55a724c7"}, + {file = "scipy-1.16.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:367d567ee9fc1e9e2047d31f39d9d6a7a04e0710c86e701e053f237d14a9b4f6"}, + {file = "scipy-1.16.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4cf5785e44e19dcd32a0e4807555e1e9a9b8d475c6afff3d21c3c543a6aa84f4"}, + {file = "scipy-1.16.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3d0b80fb26d3e13a794c71d4b837e2a589d839fd574a6bbb4ee1288c213ad4a3"}, + {file = "scipy-1.16.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8503517c44c18d1030d666cb70aaac1cc8913608816e06742498833b128488b7"}, + {file = "scipy-1.16.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:30cc4bb81c41831ecfd6dc450baf48ffd80ef5aed0f5cf3ea775740e80f16ecc"}, + {file = "scipy-1.16.1-cp313-cp313t-win_amd64.whl", hash = "sha256:c24fa02f7ed23ae514460a22c57eca8f530dbfa50b1cfdbf4f37c05b5309cc39"}, + {file = "scipy-1.16.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:796a5a9ad36fa3a782375db8f4241ab02a091308eb079746bc0f874c9b998318"}, + {file = "scipy-1.16.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:3ea0733a2ff73fd6fdc5fecca54ee9b459f4d74f00b99aced7d9a3adb43fb1cc"}, + {file = "scipy-1.16.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:85764fb15a2ad994e708258bb4ed8290d1305c62a4e1ef07c414356a24fcfbf8"}, + {file = "scipy-1.16.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:ca66d980469cb623b1759bdd6e9fd97d4e33a9fad5b33771ced24d0cb24df67e"}, + {file = "scipy-1.16.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:e7cc1ffcc230f568549fc56670bcf3df1884c30bd652c5da8138199c8c76dae0"}, + {file = "scipy-1.16.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ddfb1e8d0b540cb4ee9c53fc3dea3186f97711248fb94b4142a1b27178d8b4b"}, + {file = "scipy-1.16.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4dc0e7be79e95d8ba3435d193e0d8ce372f47f774cffd882f88ea4e1e1ddc731"}, + {file = "scipy-1.16.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f23634f9e5adb51b2a77766dac217063e764337fbc816aa8ad9aaebcd4397fd3"}, + {file = "scipy-1.16.1-cp314-cp314-win_amd64.whl", hash = "sha256:57d75524cb1c5a374958a2eae3d84e1929bb971204cc9d52213fb8589183fc19"}, + {file = "scipy-1.16.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:d8da7c3dd67bcd93f15618938f43ed0995982eb38973023d46d4646c4283ad65"}, + {file = "scipy-1.16.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:cc1d2f2fd48ba1e0620554fe5bc44d3e8f5d4185c8c109c7fbdf5af2792cfad2"}, + {file = "scipy-1.16.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:21a611ced9275cb861bacadbada0b8c0623bc00b05b09eb97f23b370fc2ae56d"}, + {file = "scipy-1.16.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dfbb25dffc4c3dd9371d8ab456ca81beeaf6f9e1c2119f179392f0dc1ab7695"}, + {file = "scipy-1.16.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f0ebb7204f063fad87fc0a0e4ff4a2ff40b2a226e4ba1b7e34bf4b79bf97cd86"}, + {file = "scipy-1.16.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f1b9e5962656f2734c2b285a8745358ecb4e4efbadd00208c80a389227ec61ff"}, + {file = "scipy-1.16.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e1a106f8c023d57a2a903e771228bf5c5b27b5d692088f457acacd3b54511e4"}, + {file = "scipy-1.16.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:709559a1db68a9abc3b2c8672c4badf1614f3b440b3ab326d86a5c0491eafae3"}, + {file = "scipy-1.16.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c0c804d60492a0aad7f5b2bb1862f4548b990049e27e828391ff2bf6f7199998"}, + {file = "scipy-1.16.1.tar.gz", hash = "sha256:44c76f9e8b6e8e488a586190ab38016e4ed2f8a038af7cd3defa903c0a2238b3"}, +] + +[package.dependencies] +numpy = ">=1.25.2,<2.6" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + [[package]] name = "selenium" version = "4.35.0" @@ -1517,6 +2157,28 @@ typing_extensions = ">=4.14.0,<4.15.0" urllib3 = {version = ">=2.5.0,<3.0", extras = ["socks"]} websocket-client = ">=1.8.0,<1.9.0" +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = 
"setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + [[package]] name = "sniffio" version = "1.3.1" @@ -1668,6 +2330,36 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\"" [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] +[[package]] +name = "sympy" +version = "1.14.0" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, + {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, +] + +[package.dependencies] +mpmath = ">=1.1.0,<1.4" + +[package.extras] +dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"}, + {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, +] + [[package]] name = "tokenizers" version = "0.22.0" @@ -1701,6 +2393,61 @@ dev = ["tokenizers[testing]"] docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"] +[[package]] +name = "torch" +version = "2.8.0+cpu" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "torch-2.8.0+cpu-cp310-cp310-linux_s390x.whl", hash = "sha256:5d255d259fbc65439b671580e40fdb8faea4644761b64fed90d6904ffe71bbc1"}, + {file = 
"torch-2.8.0+cpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b2149858b8340aeeb1f3056e0bff5b82b96e43b596fe49a9dba3184522261213"}, + {file = "torch-2.8.0+cpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:16d75fa4e96ea28a785dfd66083ca55eb1058b6d6c5413f01656ca965ee2077e"}, + {file = "torch-2.8.0+cpu-cp310-cp310-win_amd64.whl", hash = "sha256:7cc4af6ba954f36c2163eab98cf113c137fc25aa8bbf1b06ef155968627beed2"}, + {file = "torch-2.8.0+cpu-cp311-cp311-linux_s390x.whl", hash = "sha256:2bfc013dd6efdc8f8223a0241d3529af9f315dffefb53ffa3bf14d3f10127da6"}, + {file = "torch-2.8.0+cpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:680129efdeeec3db5da3f88ee5d28c1b1e103b774aef40f9d638e2cce8f8d8d8"}, + {file = "torch-2.8.0+cpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cb06175284673a581dd91fb1965662ae4ecaba6e5c357aa0ea7bb8b84b6b7eeb"}, + {file = "torch-2.8.0+cpu-cp311-cp311-win_amd64.whl", hash = "sha256:7631ef49fbd38d382909525b83696dc12a55d68492ade4ace3883c62b9fc140f"}, + {file = "torch-2.8.0+cpu-cp311-cp311-win_arm64.whl", hash = "sha256:41e6fc5ec0914fcdce44ccf338b1d19a441b55cafdd741fd0bf1af3f9e4cfd14"}, + {file = "torch-2.8.0+cpu-cp312-cp312-linux_s390x.whl", hash = "sha256:0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5"}, + {file = "torch-2.8.0+cpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d"}, + {file = "torch-2.8.0+cpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e"}, + {file = "torch-2.8.0+cpu-cp312-cp312-win_amd64.whl", hash = "sha256:2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d"}, + {file = "torch-2.8.0+cpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434"}, + {file = "torch-2.8.0+cpu-cp313-cp313-linux_s390x.whl", hash = "sha256:8b5882276633cf91fe3d2d7246c743b94d44a7e660b27f1308007fdb1bb89f7d"}, + {file = "torch-2.8.0+cpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a5064b5e23772c8d164068cc7c12e01a75faf7b948ecd95a0d4007d7487e5f25"}, + {file = "torch-2.8.0+cpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8f81dedb4c6076ec325acc3b47525f9c550e5284a18eae1d9061c543f7b6e7de"}, + {file = "torch-2.8.0+cpu-cp313-cp313-win_amd64.whl", hash = "sha256:e1ee1b2346ade3ea90306dfbec7e8ff17bc220d344109d189ae09078333b0856"}, + {file = "torch-2.8.0+cpu-cp313-cp313-win_arm64.whl", hash = "sha256:64c187345509f2b1bb334feed4666e2c781ca381874bde589182f81247e61f88"}, + {file = "torch-2.8.0+cpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:af81283ac671f434b1b25c95ba295f270e72db1fad48831eb5e4748ff9840041"}, + {file = "torch-2.8.0+cpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:a9dbb6f64f63258bc811e2c0c99640a81e5af93c531ad96e95c5ec777ea46dab"}, + {file = "torch-2.8.0+cpu-cp313-cp313t-win_amd64.whl", hash = "sha256:6d93a7165419bc4b2b907e859ccab0dea5deeab261448ae9a5ec5431f14c0e64"}, + {file = "torch-2.8.0+cpu-cp39-cp39-linux_s390x.whl", hash = "sha256:5239ef35402000844b676a9b79ed76d5ae6b028a6762bbdfebdf8421a0f4d2aa"}, + {file = "torch-2.8.0+cpu-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:eac8b7ef5c7ca106daec5e829dfa8ca56ca47601db13b402d2608861ad3ab926"}, + {file = "torch-2.8.0+cpu-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:bda4f93d64dcd9ae5d51844bbccc6fcb7d603522bcc95d256b5fe3bdb9dccca3"}, + {file = "torch-2.8.0+cpu-cp39-cp39-win_amd64.whl", hash = 
"sha256:e3c3fce24ebaac954b837d1498e36d484ad0d93e2a1ed5b6b0c55a02ea748fab"}, +] + +[package.dependencies] +filelock = "*" +fsspec = "*" +jinja2 = "*" +networkx = "*" +setuptools = {version = "*", markers = "python_version >= \"3.12\""} +sympy = ">=1.13.3" +typing-extensions = ">=4.10.0" + +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] +optree = ["optree (>=0.13.0)"] +pyyaml = ["pyyaml"] + +[package.source] +type = "legacy" +url = "https://download.pytorch.org/whl/cpu" +reference = "pytorch" + [[package]] name = "tqdm" version = "4.67.1" @@ -1862,6 +2609,18 @@ files = [ [package.dependencies] typing-extensions = ">=4.12.0" +[[package]] +name = "uritemplate" +version = "4.2.0" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686"}, + {file = "uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e"}, +] + [[package]] name = "urllib3" version = "2.5.0" @@ -1953,4 +2712,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "1d22766abbf718055b5ad2442ed8a1ad90732746d77df7dc19194a0ca3b219ba" +content-hash = "b0e5c64a4a497967e0291b75d8e4dc78a435af95437892b8254f2e170a7cf567" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index bad7f3bc..62e90397 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -5,34 +5,49 @@ description = "" authors = [ {name = "skip"} ] - +readme = "README.md" requires-python = ">=3.11,<3.14" -dependencies = [ - "fastapi (>=0.116.1,<0.117.0)", - "uvicorn (>=0.35.0,<0.36.0)", - "loguru (>=0.7.3,<0.8.0)", - "pytest (>=8.4.1,<9.0.0)", - "dotenv (>=0.9.9,<0.10.0)", - "pydantic-settings (>=2.10.1,<3.0.0)", - "psycopg2-binary (>=2.9.10,<3.0.0)", - "asyncpg (>=0.30.0,<0.31.0)", - "gunicorn (>=23.0.0,<24.0.0)", - "requests (>=2.32.5,<3.0.0)", - "bs4 (>=0.0.2,<0.0.3)", - "selenium (>=4.35.0,<5.0.0)", - "transformers (>=4.56.0,<5.0.0)", - "numpy (>=2.3.2,<3.0.0)", - "python-dotenv (>=1.1.1,<2.0.0)", - "mecab-python3 (>=1.0.10,<2.0.0)", - "httpx (>=0.28.1,<0.29.0)", - "pyperclip (>=1.9.0,<2.0.0)", - "pymysql (>=1.1.2,<2.0.0)", - "sqlalchemy (>=2.0.43,<3.0.0)", - "poetry-core (>=2.1.3,<3.0.0)", - "dbutils (>=3.1.2,<4.0.0)" -] +[[tool.poetry.source]] +name = "pytorch" +url = "https://download.pytorch.org/whl/cpu" +priority = "explicit" + +[tool.poetry.dependencies] +python = ">=3.11,<3.14" +fastapi = ">=0.116.1,<0.117.0" +uvicorn = ">=0.35.0,<0.36.0" +loguru = ">=0.7.3,<0.8.0" +pydantic-settings = ">=2.10.1,<3.0.0" +psycopg2-binary = ">=2.9.10,<3.0.0" +asyncpg = ">=0.30.0,<0.31.0" +gunicorn = ">=23.0.0,<24.0.0" +requests = ">=2.32.5,<3.0.0" +bs4 = ">=0.0.2,<0.0.3" +selenium = ">=4.35.0,<5.0.0" +transformers = ">=4.56.0,<5.0.0" +numpy = ">=2.3.2,<3.0.0" +torch = { version = "^2.4.0", source = "pytorch" } +#torch = ">=2.8.0,<3.0.0" +scikit-learn = ">=1.7.1,<2.0.0" +python-dotenv = ">=1.1.1,<2.0.0" +mecab-python3 = ">=1.0.10,<2.0.0" +httpx = ">=0.28.1,<0.29.0" +pyperclip = ">=1.9.0,<2.0.0" +pymysql = ">=1.1.2,<2.0.0" +sqlalchemy = ">=2.0.43,<3.0.0" +google = "^3.0.0" +google-auth-oauthlib = "^1.2.2" +google-api-python-client = "^2.181.0" +poetry-core=">=2.1.3,<3.0.0" +dbutils=">=3.1.2,<4.0.0" + [build-system] requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = 
"poetry.core.masonry.api" + +[tool.poetry.group.dev.dependencies] +black = "^25.1.0" +pytest = "^8.4" + diff --git a/docker/production/docker-compose.yml b/docker/production/docker-compose.yml index fdfdaadf..04ea3466 100644 --- a/docker/production/docker-compose.yml +++ b/docker/production/docker-compose.yml @@ -10,6 +10,15 @@ services: networks: - app-network + pre-processing-service: + image: ghcr.io/kernel180-be12/final-4team-icebang/pre-processing-service:latest + container_name: pre-processing-service + restart: always + ports: + - "8000:8000" + networks: + - app-network + networks: app-network: driver: bridge From cacc38fa6fbef6b4ab3a0ff67085e0a12e3466c2 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Wed, 10 Sep 2025 17:21:21 +0900 Subject: [PATCH 23/31] =?UTF-8?q?env=20=EC=84=B8=ED=8C=85=20=EB=B0=8F=20re?= =?UTF-8?q?lease=20drafter=EC=A1=B0=EC=A0=95=20(#66)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: production 환경 env 세팅 * chore: set up env in github action * chore: Release drafter version tagging 분리 * fix: release drafter 조건 --- .github/release-drafter-pre-processing.yml | 30 ++++++++++++++ ...r.yml => release-drafter-user-service.yml} | 4 +- .github/workflows/deploy-java.yml | 27 ++++++++++++- .github/workflows/release-drafter.yml | 40 ++++++++++++------- .../main/resources/application-production.yml | 27 +++++++++++++ 5 files changed, 109 insertions(+), 19 deletions(-) create mode 100644 .github/release-drafter-pre-processing.yml rename .github/{release-drafter.yml => release-drafter-user-service.yml} (82%) diff --git a/.github/release-drafter-pre-processing.yml b/.github/release-drafter-pre-processing.yml new file mode 100644 index 00000000..691e70b6 --- /dev/null +++ b/.github/release-drafter-pre-processing.yml @@ -0,0 +1,30 @@ +name-template: 'pre-processing-v$RESOLVED_VERSION' +tag-template: 'pre-processing-v$RESOLVED_VERSION' +categories: + - title: 'Feature' + labels: + - 'enhancement' + - 'pre-processing' + - title: 'Bug Fixes' + labels: + - 'bug' + - 'pre-processing' +change-template: '- $TITLE @$AUTHOR (#$NUMBER)' +change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks. 
+version-resolver: + major: + labels: + - 'major' + - 'pre-processing' + minor: + labels: + - 'minor' + - 'pre-processing' + patch: + labels: + - 'patch' + - 'pre-processing' + default: patch +template: | + ## Changes + $CHANGES \ No newline at end of file diff --git a/.github/release-drafter.yml b/.github/release-drafter-user-service.yml similarity index 82% rename from .github/release-drafter.yml rename to .github/release-drafter-user-service.yml index d7b5b424..abc943aa 100644 --- a/.github/release-drafter.yml +++ b/.github/release-drafter-user-service.yml @@ -1,5 +1,5 @@ -name-template: 'v$RESOLVED_VERSION' -tag-template: 'v$RESOLVED_VERSION' +name-template: 'user-service-v$RESOLVED_VERSION' +tag-template: 'user-service-v$RESOLVED_VERSION' categories: - title: 'Feature' labels: diff --git a/.github/workflows/deploy-java.yml b/.github/workflows/deploy-java.yml index e8b35476..69a1909d 100644 --- a/.github/workflows/deploy-java.yml +++ b/.github/workflows/deploy-java.yml @@ -14,6 +14,15 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Create env file + run: | + echo "DB_HOST=${{ secrets.DB_HOST }}" > .env.prod + echo "DB_PORT=${{ secrets.DB_PORT }}" >> .env.prod + echo "DB_USER=${{ secrets.DB_USER }}" >> .env.prod + echo "DB_PASS=${{ secrets.DB_PASS }}" >> .env.prod + echo "DB_NAME=${{ secrets.DB_NAME }}" >> .env.prod + echo "ENV_NAME=${{ secrets.ENV_NAME }}" >> .env.prod + - name: Set repo lowercase run: echo "REPO_LC=${GITHUB_REPOSITORY,,}" >> $GITHUB_ENV @@ -26,15 +35,29 @@ jobs: source: "docker/production/docker-compose.yml" target: "~/app" + - name: Copy .env.prod file to EC2 + uses: appleboy/scp-action@v0.1.7 + with: + host: ${{ secrets.SERVER_HOST }} + username: ubuntu + key: ${{ secrets.SERVER_SSH_KEY }} + source: ".env.prod" + target: "~/app/docker/production/" + overwrite: true + - name: Deploy on EC2 uses: appleboy/ssh-action@v1.0.3 with: host: ${{ secrets.SERVER_HOST }} -# username: ${{ secrets.SERVER_USER }} username: ubuntu key: ${{ secrets.SERVER_SSH_KEY }} script: | cd ~/app/docker/production + + # Remove existing .env.prod if exists and move new one + rm -f .env.prod + mv .env.prod.bak .env.prod 2>/dev/null || mv .env.prod .env.prod + echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin docker pull ghcr.io/${{ env.REPO_LC }}/user-service:latest @@ -71,4 +94,4 @@ jobs: **Repository:** ${{ env.REPO_LC }} **Tag:** ${{ github.ref_name }} **Error:** 배포 중 오류가 발생했습니다. 
- **Check:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + **Check:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} \ No newline at end of file diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 5adb153f..148ddb40 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -4,31 +4,41 @@ on: push: branches: - main + paths: + - 'apps/user-service/**' + - 'apps/pre-processing/**' pull_request: types: [opened, reopened, synchronize] + paths: + - 'apps/user-service/**' + - 'apps/pre-processing/**' + permissions: contents: read jobs: - update_release_draft: + update_user_service_release: + runs-on: ubuntu-latest permissions: - # write permission is required to create a github release contents: write - # write permission is required for autolabeler - # otherwise, read permission is required at least pull-requests: write - runs-on: ubuntu-latest + if: ${{ github.event_name == 'push' && contains(github.event.head_commit.modified, 'apps/user-service/') || github.event_name == 'pull_request' }} steps: - # (Optional) GitHub Enterprise requires GHE_HOST variable set - #- name: Set GHE_HOST - # run: | - # echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV + - uses: release-drafter/release-drafter@v5 + with: + config-name: release-drafter-user-service.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # Drafts your next Release notes as Pull Requests are merged into "main" + update_preprocessing_release: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + if: ${{ github.event_name == 'push' && contains(github.event.head_commit.modified, 'apps/pre-processing/') || github.event_name == 'pull_request' }} + steps: - uses: release-drafter/release-drafter@v5 - # (Optional) specify config name to use, relative to .github/. 
Default: release-drafter.yml - # with: - # config-name: my-config.yml - # disable-autolabeler: true + with: + config-name: release-drafter-pre-processing.yml env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/apps/user-service/src/main/resources/application-production.yml b/apps/user-service/src/main/resources/application-production.yml index e69de29b..6b048fbd 100644 --- a/apps/user-service/src/main/resources/application-production.yml +++ b/apps/user-service/src/main/resources/application-production.yml @@ -0,0 +1,27 @@ +spring: + config: + activate: + on-profile: production + + datasource: + url: jdbc:mariadb://${DB_HOST}:${DB_PORT}/${DB_NAME} + username: ${DB_USER} + password: ${DB_PASS} + driver-class-name: org.mariadb.jdbc.Driver + + hikari: + connection-timeout: 30000 + idle-timeout: 600000 + max-lifetime: 1800000 + maximum-pool-size: 10 + minimum-idle: 5 + pool-name: HikariCP-MyBatis + +mybatis: + mapper-locations: classpath:mybatis/mapper/**/*.xml + type-aliases-package: site.icebang.dto + configuration: + map-underscore-to-camel-case: true + +logging: + config: classpath:log4j2-production.yml From e04536032cb2212981bf4df721ccbbb840da1932 Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 10 Sep 2025 17:52:46 +0900 Subject: [PATCH 24/31] =?UTF-8?q?chore:=20Ci=20pipe=20=EB=82=B4=20tag=20?= =?UTF-8?q?=EA=B5=AC=EB=B6=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-java.yml | 54 +++++++++++++++------------------ .github/workflows/ci-python.yml | 27 ++++++++--------- 2 files changed, 36 insertions(+), 45 deletions(-) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 05006c2f..ead4b2f3 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -2,8 +2,8 @@ name: CI (Java) on: push: - branches: - - main + tags: + - 'user-service-v*' paths: - "apps/user-service/**" pull_request: @@ -21,12 +21,12 @@ permissions: security-events: write checks: write pull-requests: write - pages: write # GitHub Pages 배포를 위해 추가 - id-token: write # GitHub Pages 배포를 위해 추가 + pages: write # GitHub Pages 배포 + id-token: write # GitHub Pages 배포 jobs: spotless-check: - if: github.event.pull_request.draft == false + if: github.event_name == 'pull_request' && github.event.pull_request.draft == false name: Lint Check runs-on: ubuntu-latest @@ -55,7 +55,7 @@ jobs: needs: spotless-check strategy: matrix: - java-version: [ "21" ] + java-version: ["21"] steps: - name: Checkout repository @@ -78,36 +78,30 @@ jobs: - name: Run Tests run: | - if [ "${{ github.base_ref }}" == "main" ]; then - ./gradlew unitTest - ./gradlew integrationTest - else - ./gradlew unitTest - ./gradlew integrationTest - ./gradlew e2eTest - fi + ./gradlew unitTest + ./gradlew integrationTest + ./gradlew e2eTest working-directory: apps/user-service - name: Upload build artifacts - if: matrix.java-version == '21' && github.ref == 'refs/heads/main' && github.event_name == 'push' + if: matrix.java-version == '21' uses: actions/upload-artifact@v4 with: - name: build-artifacts + name: build-artifacts-${{ github.ref_name }} path: apps/user-service/build/libs/ - name: Upload OpenAPI spec artifacts - if: matrix.java-version == '21' && github.ref == 'refs/heads/main' && github.event_name == 'push' + if: matrix.java-version == '21' uses: actions/upload-artifact@v4 with: - name: openapi-spec + name: openapi-spec-${{ github.ref_name }} path: 
apps/user-service/build/api-spec/ docker: - name: Build Spring Boot Docker Image and push to registry + name: Build Spring Boot Docker Image and push runs-on: ubuntu-latest - if: github.ref == 'refs/heads/main' && github.event_name == 'push' - needs: - - build + needs: build + if: startsWith(github.ref, 'refs/tags/user-service-v') steps: - name: Checkout repository @@ -116,7 +110,7 @@ jobs: - name: Download build artifacts (JAR) uses: actions/download-artifact@v4 with: - name: build-artifacts + name: build-artifacts-${{ github.ref_name }} path: apps/user-service/build/libs/ - name: Login to Docker Registry @@ -135,19 +129,19 @@ jobs: context: ./apps/user-service push: true tags: | + ghcr.io/${{ env.REPO_LC }}/user-service:${{ github.ref_name }} ghcr.io/${{ env.REPO_LC }}/user-service:latest - ghcr.io/${{ env.REPO_LC }}/user-service:${{ github.sha }} - name: Analyze image layers run: | echo "=== Image Layer Analysis ===" - docker history ghcr.io/${{ env.REPO_LC }}/user-service:latest --human --no-trunc + docker history ghcr.io/${{ env.REPO_LC }}/user-service:${{ github.ref_name }} --human --no-trunc swagger-docs: name: Deploy Swagger Documentation runs-on: ubuntu-latest - if: github.ref == 'refs/heads/main' && github.event_name == 'push' needs: build + if: startsWith(github.ref, 'refs/tags/user-service-v') steps: - name: Checkout repository @@ -156,18 +150,18 @@ jobs: - name: Download OpenAPI spec artifacts uses: actions/download-artifact@v4 with: - name: openapi-spec + name: openapi-spec-${{ github.ref_name }} path: ./openapi-spec - name: Generate Swagger UI uses: Legion2/swagger-ui-action@v1 with: - output: user-service-swagger-ui + output: user-service-swagger-ui-${{ github.ref_name }} spec-file: openapi-spec/openapi3.yaml - name: Deploy to GitHub Pages uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./user-service-docs - destination_dir: user-service \ No newline at end of file + publish_dir: ./user-service-swagger-ui-${{ github.ref_name }} + destination_dir: user-service/${{ github.ref_name }} diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index 5055aea3..35b6b9cd 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -2,10 +2,11 @@ name: CI (Python/FastAPI) on: push: - branches: - - main + # pre-processing 서비스 태그 기준 + tags: + - 'pre-processing-v*' paths: - - "apps/pre-processing-service/**" # Python 서비스 경로 + - "apps/pre-processing-service/**" pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: @@ -13,7 +14,7 @@ on: - develop - release/** paths: - - "apps/pre-processing-service/**" # Python 서비스 경로 + - "apps/pre-processing-service/**" permissions: contents: read @@ -24,7 +25,7 @@ permissions: jobs: lint: - if: github.event.pull_request.draft == false + if: github.event_name == 'pull_request' && github.event.pull_request.draft == false name: Lint & Format Check runs-on: ubuntu-latest @@ -62,9 +63,6 @@ jobs: - name: Run Formatter Check (Black) run: poetry run black --check . - # - name: Run Linter (Ruff) - # run: poetry run ruff check . 
- test: name: Run Tests runs-on: ubuntu-latest @@ -112,11 +110,10 @@ jobs: run: poetry run pytest build-and-push-docker: - name: Build Docker Image and push to registry + name: Build Docker Image and push runs-on: ubuntu-latest - if: github.ref == 'refs/heads/feature/python-ci' && github.event_name == 'push' - needs: - - test + needs: test + if: startsWith(github.ref, 'refs/tags/pre-processing-v') steps: - name: Checkout repository @@ -135,13 +132,13 @@ jobs: - name: Build and push Docker image uses: docker/build-push-action@v5 with: - context: ./apps/pre-processing-service # Dockerfile이 있는 경로 + context: ./apps/pre-processing-service push: true tags: | + ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ github.ref_name }} ghcr.io/${{ env.REPO_LC }}/pre-processing-service:latest - ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ github.sha }} - name: Analyze image layers run: | echo "=== Image Layer Analysis ===" - docker history ghcr.io/${{ env.REPO_LC }}/pre-processing-service:latest --human --no-trunc + docker history ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ github.ref_name }} --human --no-trunc From 4cd3f9787768088e413d3975610f099389981afa Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 10 Sep 2025 17:59:08 +0900 Subject: [PATCH 25/31] chore: run e2e tests only for PRs targeting main branch --- .github/workflows/ci-java.yml | 32 ++++++++++++++++++++++++-------- .github/workflows/ci-python.yml | 23 +++++++++++++++-------- 2 files changed, 39 insertions(+), 16 deletions(-) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index ead4b2f3..2def4e8c 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -80,7 +80,9 @@ jobs: run: | ./gradlew unitTest ./gradlew integrationTest - ./gradlew e2eTest + if [ "${{ github.base_ref }}" = "main" ]; then + ./gradlew e2eTest + fi working-directory: apps/user-service - name: Upload build artifacts @@ -97,10 +99,22 @@ jobs: name: openapi-spec-${{ github.ref_name }} path: apps/user-service/build/api-spec/ + set-image-tag: + name: Set IMAGE_TAG + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/user-service-v') + steps: + - name: Extract version from tag + run: | + IMAGE_TAG="${GITHUB_REF#refs/tags/user-service-}" + echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + docker: name: Build Spring Boot Docker Image and push runs-on: ubuntu-latest - needs: build + needs: + - build + - set-image-tag if: startsWith(github.ref, 'refs/tags/user-service-v') steps: @@ -129,18 +143,20 @@ jobs: context: ./apps/user-service push: true tags: | - ghcr.io/${{ env.REPO_LC }}/user-service:${{ github.ref_name }} + ghcr.io/${{ env.REPO_LC }}/user-service:${{ env.IMAGE_TAG }} ghcr.io/${{ env.REPO_LC }}/user-service:latest - name: Analyze image layers run: | echo "=== Image Layer Analysis ===" - docker history ghcr.io/${{ env.REPO_LC }}/user-service:${{ github.ref_name }} --human --no-trunc + docker history ghcr.io/${{ env.REPO_LC }}/user-service:${{ env.IMAGE_TAG }} --human --no-trunc swagger-docs: name: Deploy Swagger Documentation runs-on: ubuntu-latest - needs: build + needs: + - build + - set-image-tag if: startsWith(github.ref, 'refs/tags/user-service-v') steps: @@ -156,12 +172,12 @@ jobs: - name: Generate Swagger UI uses: Legion2/swagger-ui-action@v1 with: - output: user-service-swagger-ui-${{ github.ref_name }} + output: user-service-swagger-ui-${{ env.IMAGE_TAG }} spec-file: openapi-spec/openapi3.yaml - name: Deploy to GitHub Pages uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ 
secrets.GITHUB_TOKEN }} - publish_dir: ./user-service-swagger-ui-${{ github.ref_name }} - destination_dir: user-service/${{ github.ref_name }} + publish_dir: ./user-service-swagger-ui-${{ env.IMAGE_TAG }} + destination_dir: user-service/${{ env.IMAGE_TAG }} diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index 35b6b9cd..918ae650 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -2,7 +2,6 @@ name: CI (Python/FastAPI) on: push: - # pre-processing 서비스 태그 기준 tags: - 'pre-processing-v*' paths: @@ -28,11 +27,9 @@ jobs: if: github.event_name == 'pull_request' && github.event.pull_request.draft == false name: Lint & Format Check runs-on: ubuntu-latest - defaults: run: working-directory: apps/pre-processing-service - steps: - name: Checkout repository uses: actions/checkout@v4 @@ -67,11 +64,9 @@ jobs: name: Run Tests runs-on: ubuntu-latest needs: lint - defaults: run: working-directory: apps/pre-processing-service - steps: - name: Checkout repository uses: actions/checkout@v4 @@ -109,10 +104,22 @@ jobs: ENV_NAME: test run: poetry run pytest + set-image-tag: + name: Set IMAGE_TAG + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/pre-processing-v') + steps: + - name: Extract version from tag + run: | + IMAGE_TAG="${GITHUB_REF#refs/tags/pre-processing-}" + echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + build-and-push-docker: name: Build Docker Image and push runs-on: ubuntu-latest - needs: test + needs: + - test + - set-image-tag if: startsWith(github.ref, 'refs/tags/pre-processing-v') steps: @@ -135,10 +142,10 @@ jobs: context: ./apps/pre-processing-service push: true tags: | - ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ github.ref_name }} + ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ env.IMAGE_TAG }} ghcr.io/${{ env.REPO_LC }}/pre-processing-service:latest - name: Analyze image layers run: | echo "=== Image Layer Analysis ===" - docker history ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ github.ref_name }} --human --no-trunc + docker history ghcr.io/${{ env.REPO_LC }}/pre-processing-service:${{ env.IMAGE_TAG }} --human --no-trunc From 429cf81497f82c6a0c1e9a9af7fb6f9765e6e6e7 Mon Sep 17 00:00:00 2001 From: kakusiA Date: Wed, 10 Sep 2025 18:09:11 +0900 Subject: [PATCH 26/31] =?UTF-8?q?chore:CI=20=ED=85=8C=EC=8A=A4=ED=8A=B8=20?= =?UTF-8?q?=EC=9E=A0=EC=8B=9C=20=EC=A3=BC=EC=84=9D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-python.yml | 106 ++++++++++++++++---------------- 1 file changed, 53 insertions(+), 53 deletions(-) diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index 918ae650..96f67f27 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -60,59 +60,59 @@ jobs: - name: Run Formatter Check (Black) run: poetry run black --check . 
- test: - name: Run Tests - runs-on: ubuntu-latest - needs: lint - defaults: - run: - working-directory: apps/pre-processing-service - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - virtualenvs-create: true - virtualenvs-in-project: true - installer-parallel: true - - - name: Load cached venv - id: cached-poetry-dependencies - uses: actions/cache@v4 - with: - path: apps/pre-processing-service/.venv - key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} - - - name: Install dependencies - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: poetry install --no-interaction --no-root - - - name: Run tests with Pytest - env: - DB_HOST: localhost - DB_PORT: 3306 - DB_USER: test_user - DB_PASS: test_pass - DB_NAME: test_db - ENV_NAME: test - run: poetry run pytest - - set-image-tag: - name: Set IMAGE_TAG - runs-on: ubuntu-latest - if: startsWith(github.ref, 'refs/tags/pre-processing-v') - steps: - - name: Extract version from tag - run: | - IMAGE_TAG="${GITHUB_REF#refs/tags/pre-processing-}" - echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + # test: + # name: Run Tests + # runs-on: ubuntu-latest + # needs: lint + # defaults: + # run: + # working-directory: apps/pre-processing-service + # steps: + # - name: Checkout repository + # uses: actions/checkout@v4 + + # - name: Set up Python 3.11 + # uses: actions/setup-python@v5 + # with: + # python-version: "3.11" + + # - name: Install Poetry + # uses: snok/install-poetry@v1 + # with: + # virtualenvs-create: true + # virtualenvs-in-project: true + # installer-parallel: true + + # - name: Load cached venv + # id: cached-poetry-dependencies + # uses: actions/cache@v4 + # with: + # path: apps/pre-processing-service/.venv + # key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} + + # - name: Install dependencies + # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + # run: poetry install --no-interaction --no-root + + # - name: Run tests with Pytest + # env: + # DB_HOST: localhost + # DB_PORT: 3306 + # DB_USER: test_user + # DB_PASS: test_pass + # DB_NAME: test_db + # ENV_NAME: test + # run: poetry run pytest + + # set-image-tag: + # name: Set IMAGE_TAG + # runs-on: ubuntu-latest + # if: startsWith(github.ref, 'refs/tags/pre-processing-v') + # steps: + # - name: Extract version from tag + # run: | + # IMAGE_TAG="${GITHUB_REF#refs/tags/pre-processing-}" + # echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV build-and-push-docker: name: Build Docker Image and push From f14fd3f45d219d429fdacff6e41c70a186e8bda0 Mon Sep 17 00:00:00 2001 From: JiHoon Date: Wed, 10 Sep 2025 18:11:09 +0900 Subject: [PATCH 27/31] =?UTF-8?q?chore=20:=20poetry=20pytorch=20cpu=20?= =?UTF-8?q?=EB=B2=84=EC=A0=84=20=EC=84=A4=EC=B9=98=20=EB=B0=8F=20docker-co?= =?UTF-8?q?mpose.yml=EC=97=90=20env=EB=AA=85=EB=A0=B9=EC=96=B4=20=EC=B6=94?= =?UTF-8?q?=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/pre-processing-service/poetry.lock | 227 +-------------------- apps/pre-processing-service/pyproject.toml | 1 - docker/production/docker-compose.yml | 4 + 3 files changed, 6 insertions(+), 226 deletions(-) diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 26eab19f..2a535f3d 100644 --- 
a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] name = "annotated-types" @@ -924,24 +924,6 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] -[[package]] -name = "jinja2" -version = "3.1.6" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, - {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - [[package]] name = "joblib" version = "1.5.2" @@ -973,77 +955,6 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] -[[package]] -name = "markupsafe" -version = "3.0.2" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, -] - [[package]] name = "mecab-python3" version = "1.0.10" @@ -1097,24 +1008,6 @@ files = [ unidic = ["unidic"] unidic-lite = ["unidic-lite"] -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] -tests = ["pytest (>=4.6)"] - [[package]] name = "mypy-extensions" version = "1.1.0" @@ -1127,27 +1020,6 @@ files = [ {file = "mypy_extensions-1.1.0.tar.gz", hash = 
"sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[[package]] -name = "networkx" -version = "3.5" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.11" -groups = ["main"] -files = [ - {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, - {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, -] - -[package.extras] -default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] -developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] -extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] -test-extras = ["pytest-mpl", "pytest-randomly"] - [[package]] name = "numpy" version = "2.3.3" @@ -2157,28 +2029,6 @@ typing_extensions = ">=4.14.0,<4.15.0" urllib3 = {version = ">=2.5.0,<3.0", extras = ["socks"]} websocket-client = ">=1.8.0,<1.9.0" -[[package]] -name = "setuptools" -version = "80.9.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version >= \"3.12\"" -files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] - [[package]] name = "sniffio" version = "1.3.1" @@ -2330,24 +2180,6 @@ 
typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\"" [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] -[[package]] -name = "sympy" -version = "1.14.0" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, - {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - [[package]] name = "threadpoolctl" version = "3.6.0" @@ -2393,61 +2225,6 @@ dev = ["tokenizers[testing]"] docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"] -[[package]] -name = "torch" -version = "2.8.0+cpu" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.9.0" -groups = ["main"] -files = [ - {file = "torch-2.8.0+cpu-cp310-cp310-linux_s390x.whl", hash = "sha256:5d255d259fbc65439b671580e40fdb8faea4644761b64fed90d6904ffe71bbc1"}, - {file = "torch-2.8.0+cpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b2149858b8340aeeb1f3056e0bff5b82b96e43b596fe49a9dba3184522261213"}, - {file = "torch-2.8.0+cpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:16d75fa4e96ea28a785dfd66083ca55eb1058b6d6c5413f01656ca965ee2077e"}, - {file = "torch-2.8.0+cpu-cp310-cp310-win_amd64.whl", hash = "sha256:7cc4af6ba954f36c2163eab98cf113c137fc25aa8bbf1b06ef155968627beed2"}, - {file = "torch-2.8.0+cpu-cp311-cp311-linux_s390x.whl", hash = "sha256:2bfc013dd6efdc8f8223a0241d3529af9f315dffefb53ffa3bf14d3f10127da6"}, - {file = "torch-2.8.0+cpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:680129efdeeec3db5da3f88ee5d28c1b1e103b774aef40f9d638e2cce8f8d8d8"}, - {file = "torch-2.8.0+cpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cb06175284673a581dd91fb1965662ae4ecaba6e5c357aa0ea7bb8b84b6b7eeb"}, - {file = "torch-2.8.0+cpu-cp311-cp311-win_amd64.whl", hash = "sha256:7631ef49fbd38d382909525b83696dc12a55d68492ade4ace3883c62b9fc140f"}, - {file = "torch-2.8.0+cpu-cp311-cp311-win_arm64.whl", hash = "sha256:41e6fc5ec0914fcdce44ccf338b1d19a441b55cafdd741fd0bf1af3f9e4cfd14"}, - {file = "torch-2.8.0+cpu-cp312-cp312-linux_s390x.whl", hash = "sha256:0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5"}, - {file = "torch-2.8.0+cpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d"}, - {file = "torch-2.8.0+cpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e"}, - {file = "torch-2.8.0+cpu-cp312-cp312-win_amd64.whl", hash = "sha256:2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d"}, - {file = "torch-2.8.0+cpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434"}, - {file = "torch-2.8.0+cpu-cp313-cp313-linux_s390x.whl", hash = "sha256:8b5882276633cf91fe3d2d7246c743b94d44a7e660b27f1308007fdb1bb89f7d"}, - {file = "torch-2.8.0+cpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a5064b5e23772c8d164068cc7c12e01a75faf7b948ecd95a0d4007d7487e5f25"}, - {file = 
"torch-2.8.0+cpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8f81dedb4c6076ec325acc3b47525f9c550e5284a18eae1d9061c543f7b6e7de"}, - {file = "torch-2.8.0+cpu-cp313-cp313-win_amd64.whl", hash = "sha256:e1ee1b2346ade3ea90306dfbec7e8ff17bc220d344109d189ae09078333b0856"}, - {file = "torch-2.8.0+cpu-cp313-cp313-win_arm64.whl", hash = "sha256:64c187345509f2b1bb334feed4666e2c781ca381874bde589182f81247e61f88"}, - {file = "torch-2.8.0+cpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:af81283ac671f434b1b25c95ba295f270e72db1fad48831eb5e4748ff9840041"}, - {file = "torch-2.8.0+cpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:a9dbb6f64f63258bc811e2c0c99640a81e5af93c531ad96e95c5ec777ea46dab"}, - {file = "torch-2.8.0+cpu-cp313-cp313t-win_amd64.whl", hash = "sha256:6d93a7165419bc4b2b907e859ccab0dea5deeab261448ae9a5ec5431f14c0e64"}, - {file = "torch-2.8.0+cpu-cp39-cp39-linux_s390x.whl", hash = "sha256:5239ef35402000844b676a9b79ed76d5ae6b028a6762bbdfebdf8421a0f4d2aa"}, - {file = "torch-2.8.0+cpu-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:eac8b7ef5c7ca106daec5e829dfa8ca56ca47601db13b402d2608861ad3ab926"}, - {file = "torch-2.8.0+cpu-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:bda4f93d64dcd9ae5d51844bbccc6fcb7d603522bcc95d256b5fe3bdb9dccca3"}, - {file = "torch-2.8.0+cpu-cp39-cp39-win_amd64.whl", hash = "sha256:e3c3fce24ebaac954b837d1498e36d484ad0d93e2a1ed5b6b0c55a02ea748fab"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -setuptools = {version = "*", markers = "python_version >= \"3.12\""} -sympy = ">=1.13.3" -typing-extensions = ">=4.10.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.13.0)"] -pyyaml = ["pyyaml"] - -[package.source] -type = "legacy" -url = "https://download.pytorch.org/whl/cpu" -reference = "pytorch" - [[package]] name = "tqdm" version = "4.67.1" @@ -2712,4 +2489,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "b0e5c64a4a497967e0291b75d8e4dc78a435af95437892b8254f2e170a7cf567" +content-hash = "1c4515b957a639ee4f2aecf7e2a9b856228870ff4cdfeccd320dc5376fc3605b" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index 62e90397..34ece3ae 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -28,7 +28,6 @@ bs4 = ">=0.0.2,<0.0.3" selenium = ">=4.35.0,<5.0.0" transformers = ">=4.56.0,<5.0.0" numpy = ">=2.3.2,<3.0.0" -torch = { version = "^2.4.0", source = "pytorch" } #torch = ">=2.8.0,<3.0.0" scikit-learn = ">=1.7.1,<2.0.0" python-dotenv = ">=1.1.1,<2.0.0" diff --git a/docker/production/docker-compose.yml b/docker/production/docker-compose.yml index 04ea3466..fa3ca0cc 100644 --- a/docker/production/docker-compose.yml +++ b/docker/production/docker-compose.yml @@ -9,6 +9,8 @@ services: - "80:8080" networks: - app-network + env_file: + - .env.prod pre-processing-service: image: ghcr.io/kernel180-be12/final-4team-icebang/pre-processing-service:latest @@ -18,6 +20,8 @@ services: - "8000:8000" networks: - app-network + env_file: + - .env.prod networks: app-network: From bea03093151343c5fb7a87ee7388bc354d59d8b7 Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 10 Sep 2025 18:18:31 +0900 Subject: [PATCH 28/31] hotfix: release drafter config --- .github/release-drafter-pre-processing.yml | 24 ++++++++++++---- .github/release-drafter-user-service.yml | 29 ++++++++++++++++---- .github/workflows/release-drafter.yml | 32 
+++++++++------------- 3 files changed, 56 insertions(+), 29 deletions(-) diff --git a/.github/release-drafter-pre-processing.yml b/.github/release-drafter-pre-processing.yml index 691e70b6..e9feffac 100644 --- a/.github/release-drafter-pre-processing.yml +++ b/.github/release-drafter-pre-processing.yml @@ -1,30 +1,44 @@ name-template: 'pre-processing-v$RESOLVED_VERSION' tag-template: 'pre-processing-v$RESOLVED_VERSION' + categories: - - title: 'Feature' + - title: '🚀 Features' labels: - 'enhancement' - 'pre-processing' - - title: 'Bug Fixes' + - title: '🐛 Bug Fixes' labels: - 'bug' - 'pre-processing' + - title: '🧰 Maintenance' + labels: + - 'maintenance' + - 'pre-processing' + change-template: '- $TITLE @$AUTHOR (#$NUMBER)' -change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks. +change-title-escapes: '\<*_&' + version-resolver: major: labels: - 'major' + - 'breaking-change' - 'pre-processing' minor: labels: - 'minor' + - 'enhancement' - 'pre-processing' patch: labels: - 'patch' + - 'bug' - 'pre-processing' default: patch + template: | - ## Changes - $CHANGES \ No newline at end of file + ## What's Changed + + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...pre-processing-v$RESOLVED_VERSION diff --git a/.github/release-drafter-user-service.yml b/.github/release-drafter-user-service.yml index abc943aa..7259f4b4 100644 --- a/.github/release-drafter-user-service.yml +++ b/.github/release-drafter-user-service.yml @@ -1,25 +1,44 @@ name-template: 'user-service-v$RESOLVED_VERSION' tag-template: 'user-service-v$RESOLVED_VERSION' + categories: - - title: 'Feature' + - title: '🚀 Features' labels: - 'enhancement' - - title: 'Bug Fixes' + - 'user-service' + - title: '🐛 Bug Fixes' labels: - 'bug' + - 'user-service' + - title: '🧰 Maintenance' + labels: + - 'maintenance' + - 'user-service' + change-template: '- $TITLE @$AUTHOR (#$NUMBER)' -change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks. 
+change-title-escapes: '\<*_&' + version-resolver: major: labels: - 'major' + - 'breaking-change' + - 'user-service' minor: labels: - 'minor' + - 'enhancement' + - 'user-service' patch: labels: - 'patch' + - 'bug' + - 'user-service' default: patch + template: | - ## Changes - $CHANGES \ No newline at end of file + ## What's Changed + + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...user-service-v$RESOLVED_VERSION \ No newline at end of file diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 148ddb40..a29ba80b 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -4,41 +4,35 @@ on: push: branches: - main - paths: - - 'apps/user-service/**' - - 'apps/pre-processing/**' + - master pull_request: types: [opened, reopened, synchronize] - paths: - - 'apps/user-service/**' - - 'apps/pre-processing/**' permissions: contents: read + pull-requests: read jobs: - update_user_service_release: - runs-on: ubuntu-latest + update_release_draft: permissions: contents: write pull-requests: write - if: ${{ github.event_name == 'push' && contains(github.event.head_commit.modified, 'apps/user-service/') || github.event_name == 'pull_request' }} + runs-on: ubuntu-latest steps: + # Pre-processing service - uses: release-drafter/release-drafter@v5 with: - config-name: release-drafter-user-service.yml + config-name: release-drafter-pre-processing.yml + name: 'pre-processing-v$RESOLVED_VERSION' + tag: 'pre-processing-v$RESOLVED_VERSION' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - update_preprocessing_release: - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - if: ${{ github.event_name == 'push' && contains(github.event.head_commit.modified, 'apps/pre-processing/') || github.event_name == 'pull_request' }} - steps: + # User service - uses: release-drafter/release-drafter@v5 with: - config-name: release-drafter-pre-processing.yml + config-name: release-drafter-user-service.yml + name: 'user-service-v$RESOLVED_VERSION' + tag: 'user-service-v$RESOLVED_VERSION' env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From 7b2a6e55742915852045a9ca743c2a7b58b8021a Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 10 Sep 2025 18:22:56 +0900 Subject: [PATCH 29/31] fix: python ci --- .github/workflows/ci-python.yml | 106 ++++++++++++++++---------------- 1 file changed, 53 insertions(+), 53 deletions(-) diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index 96f67f27..2ffa9661 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -60,59 +60,59 @@ jobs: - name: Run Formatter Check (Black) run: poetry run black --check . 
- # test: - # name: Run Tests - # runs-on: ubuntu-latest - # needs: lint - # defaults: - # run: - # working-directory: apps/pre-processing-service - # steps: - # - name: Checkout repository - # uses: actions/checkout@v4 - - # - name: Set up Python 3.11 - # uses: actions/setup-python@v5 - # with: - # python-version: "3.11" - - # - name: Install Poetry - # uses: snok/install-poetry@v1 - # with: - # virtualenvs-create: true - # virtualenvs-in-project: true - # installer-parallel: true - - # - name: Load cached venv - # id: cached-poetry-dependencies - # uses: actions/cache@v4 - # with: - # path: apps/pre-processing-service/.venv - # key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} - - # - name: Install dependencies - # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - # run: poetry install --no-interaction --no-root - - # - name: Run tests with Pytest - # env: - # DB_HOST: localhost - # DB_PORT: 3306 - # DB_USER: test_user - # DB_PASS: test_pass - # DB_NAME: test_db - # ENV_NAME: test - # run: poetry run pytest - - # set-image-tag: - # name: Set IMAGE_TAG - # runs-on: ubuntu-latest - # if: startsWith(github.ref, 'refs/tags/pre-processing-v') - # steps: - # - name: Extract version from tag - # run: | - # IMAGE_TAG="${GITHUB_REF#refs/tags/pre-processing-}" - # echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + test: + name: Run Tests + runs-on: ubuntu-latest + needs: lint + defaults: + run: + working-directory: apps/pre-processing-service + steps: + - name: Checkout repository + uses: actions/checkout@v4 + +# - name: Set up Python 3.11 +# uses: actions/setup-python@v5 +# with: +# python-version: "3.11" +# +# - name: Install Poetry +# uses: snok/install-poetry@v1 +# with: +# virtualenvs-create: true +# virtualenvs-in-project: true +# installer-parallel: true +# +# - name: Load cached venv +# id: cached-poetry-dependencies +# uses: actions/cache@v4 +# with: +# path: apps/pre-processing-service/.venv +# key: venv-${{ runner.os }}-${{ hashFiles('apps/pre-processing-service/poetry.lock') }} +# +# - name: Install dependencies +# if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' +# run: poetry install --no-interaction --no-root +# +# - name: Run tests with Pytest +# env: +# DB_HOST: localhost +# DB_PORT: 3306 +# DB_USER: test_user +# DB_PASS: test_pass +# DB_NAME: test_db +# ENV_NAME: test +# run: poetry run pytest + + set-image-tag: + name: Set IMAGE_TAG + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/pre-processing-v') + steps: + - name: Extract version from tag + run: | + IMAGE_TAG="${GITHUB_REF#refs/tags/pre-processing-}" + echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV build-and-push-docker: name: Build Docker Image and push From 6faa97717683cf2b8193011c5366265b31f265e3 Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 10 Sep 2025 18:27:15 +0900 Subject: [PATCH 30/31] fix: Slash in upload artifact --- .github/workflows/ci-java.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 2def4e8c..992b34f2 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -89,14 +89,14 @@ jobs: if: matrix.java-version == '21' uses: actions/upload-artifact@v4 with: - name: build-artifacts-${{ github.ref_name }} + name: build-artifacts-${{ github.run_id }}-${{ github.run_attempt }} path: apps/user-service/build/libs/ - name: Upload OpenAPI spec artifacts if: matrix.java-version == '21' uses: actions/upload-artifact@v4 with: 
- name: openapi-spec-${{ github.ref_name }} + name: openapi-spec-${{ github.run_id }}-${{ github.run_attempt }} path: apps/user-service/build/api-spec/ set-image-tag: @@ -124,7 +124,7 @@ jobs: - name: Download build artifacts (JAR) uses: actions/download-artifact@v4 with: - name: build-artifacts-${{ github.ref_name }} + name: build-artifacts-${{ github.run_id }}-${{ github.run_attempt }} path: apps/user-service/build/libs/ - name: Login to Docker Registry @@ -166,7 +166,7 @@ jobs: - name: Download OpenAPI spec artifacts uses: actions/download-artifact@v4 with: - name: openapi-spec-${{ github.ref_name }} + name: openapi-spec-${{ github.run_id }}-${{ github.run_attempt }} path: ./openapi-spec - name: Generate Swagger UI @@ -180,4 +180,4 @@ with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./user-service-swagger-ui-${{ env.IMAGE_TAG }} - destination_dir: user-service/${{ env.IMAGE_TAG }} + destination_dir: user-service/${{ env.IMAGE_TAG }} \ No newline at end of file From 360c9b61e35122f6734ec9a8a45120286a2f152e Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 10 Sep 2025 18:31:28 +0900 Subject: [PATCH 31/31] fix: adjust upload artifact conditions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-java.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 992b34f2..a0a7ebbb 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -86,14 +86,14 @@ jobs: working-directory: apps/user-service - name: Upload build artifacts - if: matrix.java-version == '21' + if: matrix.java-version == '21' && github.ref == 'refs/heads/main' uses: actions/upload-artifact@v4 with: name: build-artifacts-${{ github.run_id }}-${{ github.run_attempt }} path: apps/user-service/build/libs/ - name: Upload OpenAPI spec artifacts - if: matrix.java-version == '21' + if: matrix.java-version == '21' && github.ref == 'refs/heads/main' uses: actions/upload-artifact@v4 with: name: openapi-spec-${{ github.run_id }}-${{ github.run_attempt }}