From cb3dd19ee8fd4cea2558936d9f82deb969767f1f Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 11 Sep 2025 17:41:56 +0900 Subject: [PATCH 01/57] =?UTF-8?q?feat:=20=EC=9C=A0=EC=82=AC=EB=8F=84=20tor?= =?UTF-8?q?ch=20=EC=82=AD=EC=A0=9C=20=ED=9B=84=20onnx=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-python.yml | 2 + .../app/service/similarity_service.py | 4 +- .../app/utils/similarity_analyzer.py | 105 +++++-------- apps/pre-processing-service/poetry.lock | 141 +++++++++++++++++- apps/pre-processing-service/pyproject.toml | 10 +- 5 files changed, 178 insertions(+), 84 deletions(-) diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index ad50c51b..54d1ab1a 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -2,6 +2,8 @@ name: CI (Python/FastAPI) on: push: + branches: + - feature/onnx tags: - 'pre-processing-v*' pull_request: diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 0ccb6b2c..dbd2b762 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -1,4 +1,4 @@ -from app.utils.similarity_analyzer import SimilarityAnalyzer +from app.utils.similarity_analyzer import SimilarityAnalyzerONNX from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSimilarity from loguru import logger @@ -43,7 +43,7 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict analysis_mode = "matched_products" try: - analyzer = SimilarityAnalyzer() + analyzer = SimilarityAnalyzerONNX() logger.info( f"키워드 '{keyword}'와 {len(candidates)}개 상품의 유사도 분석 시작... 
(모드: {analysis_mode})" diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py index f1c3104e..aec6d48b 100644 --- a/apps/pre-processing-service/app/utils/similarity_analyzer.py +++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py @@ -1,42 +1,37 @@ -import torch import numpy as np from sklearn.metrics.pairwise import cosine_similarity -from transformers import AutoTokenizer, AutoModel from loguru import logger +import onnxruntime as ort +from transformers import AutoTokenizer +class SimilarityAnalyzerONNX: + """ONNX 기반 텍스트 유사도 분석기""" -class SimilarityAnalyzer: - """텍스트 유사도 분석기""" - - def __init__(self): + def __init__(self, model_path: str = "klue_bert.onnx"): try: - logger.info("KLUE BERT 모델 로딩 시도 중...") + logger.info("토크나이저 로딩 중: klue/bert-base") self.tokenizer = AutoTokenizer.from_pretrained("klue/bert-base") - self.model = AutoModel.from_pretrained("klue/bert-base") - logger.success("KLUE BERT 모델 로딩 성공") + logger.info(f"ONNX 모델 로딩 중: {model_path}") + self.ort_session = ort.InferenceSession(model_path) + logger.success("ONNX 모델 로딩 성공") except Exception as e: - logger.warning(f"KLUE BERT 로딩 실패, 다국어 BERT로 대체: {e}") - try: - logger.info("다국어 BERT 모델 로딩 시도 중...") - self.tokenizer = AutoTokenizer.from_pretrained( - "bert-base-multilingual-cased" - ) - self.model = AutoModel.from_pretrained("bert-base-multilingual-cased") - logger.success("다국어 BERT 모델 로딩 성공") - except Exception as e2: - logger.error(f"모든 BERT 모델 로딩 실패: {e2}") - raise e2 + logger.error(f"모델 로딩 실패: {e}") + raise e def get_embedding(self, text: str) -> np.ndarray: - """텍스트 임베딩 생성""" + """텍스트 임베딩 생성 (ONNX)""" try: logger.debug(f"임베딩 생성 시작: text='{text[:50]}'") + # 토큰화 inputs = self.tokenizer( - text, return_tensors="pt", padding=True, truncation=True, max_length=128 + text, return_tensors="np", padding=True, truncation=True, max_length=128 ) - with torch.no_grad(): - outputs = self.model(**inputs) - embedding = 
outputs.last_hidden_state[:, 0, :].numpy() + ort_inputs = { + "input_ids": inputs["input_ids"].astype(np.int64), + "attention_mask": inputs["attention_mask"].astype(np.int64) + } + ort_outs = self.ort_session.run(None, ort_inputs) + embedding = ort_outs[0][:, 0, :] # [CLS] 토큰 임베딩 logger.debug(f"임베딩 생성 완료: shape={embedding.shape}") return embedding except Exception as e: @@ -46,65 +41,35 @@ def get_embedding(self, text: str) -> np.ndarray: def calculate_similarity(self, text1: str, text2: str) -> float: """두 텍스트 간 유사도 계산""" try: - logger.debug( - f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'" - ) - embedding1 = self.get_embedding(text1) - embedding2 = self.get_embedding(text2) - similarity = cosine_similarity(embedding1, embedding2)[0][0] + logger.debug(f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'") + emb1 = self.get_embedding(text1) + emb2 = self.get_embedding(text2) + similarity = cosine_similarity(emb1, emb2)[0][0] logger.debug(f"유사도 계산 완료: similarity={similarity:.4f}") return similarity except Exception as e: - logger.error( - f"유사도 계산 오류: text1='{text1[:30]}', text2='{text2[:30]}', error='{e}'" - ) + logger.error(f"유사도 계산 오류: {e}") raise - def analyze_similarity_batch( - self, keyword: str, product_titles: list[str] - ) -> list[dict]: - """배치로 유사도 분석""" - logger.info( - f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}" - ) - + def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> list[dict]: + """배치 유사도 분석""" + logger.info(f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}") try: - keyword_embedding = self.get_embedding(keyword) + keyword_emb = self.get_embedding(keyword) results = [] for i, title in enumerate(product_titles): try: - logger.debug( - f"유사도 계산 중 ({i + 1}/{len(product_titles)}): title='{title[:30]}'" - ) - title_embedding = self.get_embedding(title) - similarity = cosine_similarity(keyword_embedding, title_embedding)[ - 0 - ][0] - - results.append( - { - 
"index": i, - "title": title, - "similarity": float(similarity), - "score": float(similarity), - } - ) - logger.debug( - f"유사도 계산 완료 ({i + 1}/{len(product_titles)}): similarity={similarity:.4f}" - ) + title_emb = self.get_embedding(title) + sim = cosine_similarity(keyword_emb, title_emb)[0][0] + results.append({"index": i, "title": title, "similarity": float(sim), "score": float(sim)}) except Exception as e: logger.error(f"유사도 계산 오류 (제목: {title[:30]}): {e}") - results.append( - {"index": i, "title": title, "similarity": 0.0, "score": 0.0} - ) + results.append({"index": i, "title": title, "similarity": 0.0, "score": 0.0}) - # 유사도 기준 내림차순 정렬 results.sort(key=lambda x: x["similarity"], reverse=True) - logger.info( - f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}" - ) + logger.info(f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}") return results except Exception as e: - logger.error(f"배치 유사도 분석 실패: keyword='{keyword}', error='{e}'") + logger.error(f"배치 유사도 분석 실패: {e}") raise diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 2a535f3d..d34b3215 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -438,6 +438,24 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer 
(>=2.4)"] + [[package]] name = "dbutils" version = "3.1.2" @@ -489,6 +507,18 @@ files = [ {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"}, ] +[[package]] +name = "flatbuffers" +version = "25.2.10" +description = "The FlatBuffers serialization format for Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051"}, + {file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"}, +] + [[package]] name = "fsspec" version = "2025.9.0" @@ -820,14 +850,14 @@ trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httplib2" -version = "0.30.0" +version = "0.30.2" description = "A comprehensive HTTP client library." optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "httplib2-0.30.0-py3-none-any.whl", hash = "sha256:d10443a2bdfe0ea5dbb17e016726146d48b574208dafd41e854cf34e7d78842c"}, - {file = "httplib2-0.30.0.tar.gz", hash = "sha256:d5b23c11fcf8e57e00ff91b7008656af0f6242c8886fd97065c97509e4e548c5"}, + {file = "httplib2-0.30.2-py3-none-any.whl", hash = "sha256:62a665905c1f1d1069c34f933787d2a4435c67c0bc2b323645dcfbb64661b5ec"}, + {file = "httplib2-0.30.2.tar.gz", hash = "sha256:050bde6a332824b05a3deef5238f2b0372f71af46f8ca2190c2cb1f66aa376cd"}, ] [package.dependencies] @@ -897,6 +927,21 @@ testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1. 
torch = ["safetensors[torch]", "torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + [[package]] name = "idna" version = "3.10" @@ -1008,6 +1053,24 @@ files = [ unidic = ["unidic"] unidic-lite = ["unidic-lite"] +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] +tests = ["pytest (>=4.6)"] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -1121,6 +1184,42 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "onnxruntime" +version = "1.22.1" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = false +python-versions = 
">=3.10" +groups = ["main"] +files = [ + {file = "onnxruntime-1.22.1-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:80e7f51da1f5201c1379b8d6ef6170505cd800e40da216290f5e06be01aadf95"}, + {file = "onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89ddfdbbdaf7e3a59515dee657f6515601d55cb21a0f0f48c81aefc54ff1b73"}, + {file = "onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bddc75868bcf6f9ed76858a632f65f7b1846bdcefc6d637b1e359c2c68609964"}, + {file = "onnxruntime-1.22.1-cp310-cp310-win_amd64.whl", hash = "sha256:01e2f21b2793eb0c8642d2be3cee34cc7d96b85f45f6615e4e220424158877ce"}, + {file = "onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0"}, + {file = "onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5"}, + {file = "onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04"}, + {file = "onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03"}, + {file = "onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a"}, + {file = "onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928"}, + {file = "onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d"}, + {file = "onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87"}, + {file = 
"onnxruntime-1.22.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:d29c7d87b6cbed8fecfd09dca471832384d12a69e1ab873e5effbb94adc3e966"}, + {file = "onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:460487d83b7056ba98f1f7bac80287224c31d8149b15712b0d6f5078fcc33d0f"}, + {file = "onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0c37070268ba4e02a1a9d28560cd00cd1e94f0d4f275cbef283854f861a65fa"}, + {file = "onnxruntime-1.22.1-cp313-cp313-win_amd64.whl", hash = "sha256:70980d729145a36a05f74b573435531f55ef9503bcda81fc6c3d6b9306199982"}, + {file = "onnxruntime-1.22.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33a7980bbc4b7f446bac26c3785652fe8730ed02617d765399e89ac7d44e0f7d"}, + {file = "onnxruntime-1.22.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7e823624b015ea879d976cbef8bfaed2f7e2cc233d7506860a76dd37f8f381"}, +] + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + [[package]] name = "outcome" version = "1.3.0.post0" @@ -1575,6 +1674,22 @@ files = [ {file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"}, ] +[[package]] +name = "pyreadline3" +version = "3.5.4" +description = "A python implementation of GNU readline." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, +] + +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + [[package]] name = "pysocks" version = "1.7.1" @@ -2180,6 +2295,24 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\"" [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] +[[package]] +name = "sympy" +version = "1.14.0" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, + {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, +] + +[package.dependencies] +mpmath = ">=1.1.0,<1.4" + +[package.extras] +dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] + [[package]] name = "threadpoolctl" version = "3.6.0" @@ -2489,4 +2622,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "1c4515b957a639ee4f2aecf7e2a9b856228870ff4cdfeccd320dc5376fc3605b" +content-hash = "dfe0ec278c21033d4d35335403a808503862685899dfd5dd6173841fbc18a931" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index 34ece3ae..38c5b029 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -5,15 +5,9 @@ description = "" authors = [ {name = "skip"} ] -readme = "README.md" +# readme = "README.md" requires-python = ">=3.11,<3.14" - -[[tool.poetry.source]] -name 
= "pytorch" -url = "https://download.pytorch.org/whl/cpu" -priority = "explicit" - [tool.poetry.dependencies] python = ">=3.11,<3.14" fastapi = ">=0.116.1,<0.117.0" @@ -28,7 +22,6 @@ bs4 = ">=0.0.2,<0.0.3" selenium = ">=4.35.0,<5.0.0" transformers = ">=4.56.0,<5.0.0" numpy = ">=2.3.2,<3.0.0" -#torch = ">=2.8.0,<3.0.0" scikit-learn = ">=1.7.1,<2.0.0" python-dotenv = ">=1.1.1,<2.0.0" mecab-python3 = ">=1.0.10,<2.0.0" @@ -41,6 +34,7 @@ google-auth-oauthlib = "^1.2.2" google-api-python-client = "^2.181.0" poetry-core=">=2.1.3,<3.0.0" dbutils=">=3.1.2,<4.0.0" +onnxruntime = "^1.22.1" [build-system] requires = ["poetry-core>=2.0.0,<3.0.0"] From 72002de6a273c0f9445b1b6a047145ebcdcd1e03 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 11 Sep 2025 17:44:19 +0900 Subject: [PATCH 02/57] =?UTF-8?q?feat:=20=EC=9C=A0=EC=82=AC=EB=8F=84=20tor?= =?UTF-8?q?ch=20=EC=82=AD=EC=A0=9C=20=ED=9B=84=20onnx=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/utils/similarity_analyzer.py | 32 +++++++++++++++---- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py index aec6d48b..d4e7c0c5 100644 --- a/apps/pre-processing-service/app/utils/similarity_analyzer.py +++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py @@ -4,6 +4,7 @@ import onnxruntime as ort from transformers import AutoTokenizer + class SimilarityAnalyzerONNX: """ONNX 기반 텍스트 유사도 분석기""" @@ -28,7 +29,7 @@ def get_embedding(self, text: str) -> np.ndarray: ) ort_inputs = { "input_ids": inputs["input_ids"].astype(np.int64), - "attention_mask": inputs["attention_mask"].astype(np.int64) + "attention_mask": inputs["attention_mask"].astype(np.int64), } ort_outs = self.ort_session.run(None, ort_inputs) embedding = ort_outs[0][:, 0, :] # [CLS] 토큰 임베딩 @@ -41,7 +42,9 @@ def get_embedding(self, text: str) -> np.ndarray: def 
calculate_similarity(self, text1: str, text2: str) -> float: """두 텍스트 간 유사도 계산""" try: - logger.debug(f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'") + logger.debug( + f"유사도 계산 시작: text1='{text1[:30]}', text2='{text2[:30]}'" + ) emb1 = self.get_embedding(text1) emb2 = self.get_embedding(text2) similarity = cosine_similarity(emb1, emb2)[0][0] @@ -51,9 +54,13 @@ def calculate_similarity(self, text1: str, text2: str) -> float: logger.error(f"유사도 계산 오류: {e}") raise - def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> list[dict]: + def analyze_similarity_batch( + self, keyword: str, product_titles: list[str] + ) -> list[dict]: """배치 유사도 분석""" - logger.info(f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}") + logger.info( + f"배치 유사도 분석 시작: keyword='{keyword}', titles_count={len(product_titles)}" + ) try: keyword_emb = self.get_embedding(keyword) results = [] @@ -62,13 +69,24 @@ def analyze_similarity_batch(self, keyword: str, product_titles: list[str]) -> l try: title_emb = self.get_embedding(title) sim = cosine_similarity(keyword_emb, title_emb)[0][0] - results.append({"index": i, "title": title, "similarity": float(sim), "score": float(sim)}) + results.append( + { + "index": i, + "title": title, + "similarity": float(sim), + "score": float(sim), + } + ) except Exception as e: logger.error(f"유사도 계산 오류 (제목: {title[:30]}): {e}") - results.append({"index": i, "title": title, "similarity": 0.0, "score": 0.0}) + results.append( + {"index": i, "title": title, "similarity": 0.0, "score": 0.0} + ) results.sort(key=lambda x: x["similarity"], reverse=True) - logger.info(f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}") + logger.info( + f"배치 유사도 분석 완료: 총 {len(results)}개, 최고 유사도={results[0]['similarity']:.4f}" + ) return results except Exception as e: logger.error(f"배치 유사도 분석 실패: {e}") From 99e2f99e506a643ca5005de2c6eb7adbcd70c907 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 11 Sep 2025 
19:27:24 +0900 Subject: [PATCH 03/57] =?UTF-8?q?chore:=20onnx=20=EB=AA=A8=EB=8D=B8=20?= =?UTF-8?q?=EC=A0=80=EC=9E=A5=EC=9A=A9=20=EB=B3=BC=EB=A5=A8=20docker-compo?= =?UTF-8?q?ser=EC=97=90=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docker/production/docker-compose.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/production/docker-compose.yml b/docker/production/docker-compose.yml index fa3ca0cc..7ff93250 100644 --- a/docker/production/docker-compose.yml +++ b/docker/production/docker-compose.yml @@ -22,6 +22,12 @@ services: - app-network env_file: - .env.prod + volumes: + - onnx_models:/app/models # ONNX 모델 저장용 볼륨 -> 서버에 만들어야함 + +volumes: + onnx_models: + driver: local networks: app-network: From cab225b2298fe9c6f3f45c438f0ffe6b78b323d1 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Fri, 12 Sep 2025 09:44:19 +0900 Subject: [PATCH 04/57] =?UTF-8?q?chore:=20poetry.lock=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..3557bc5a --- /dev/null +++ b/poetry.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. 
+package = [] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.11" +content-hash = "f5666f5625d676c506924a57dc0520a1f3ed2b2c774baed3dc85353594f8473d" From c5ac0ab92de78c7e4ccb17b0261e037991ac6796 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Fri, 12 Sep 2025 10:02:12 +0900 Subject: [PATCH 05/57] =?UTF-8?q?chore:=20poetry.lock=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/pre-processing-service/poetry.lock | 2189 +++++++++++++++++++++++ 1 file changed, 2189 insertions(+) diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index e69de29b..49d36b65 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -0,0 +1,2189 @@ +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.10.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1"}, + {file = "anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +trio = ["trio 
(>=0.26.1)"] + +[[package]] +name = "asyncpg" +version = "0.30.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, + {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"}, + {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"}, + {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"}, + {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"}, + {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"}, + {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"}, + {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"}, + {file 
= "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"}, + {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"}, + {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"}, + {file = 
"asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"}, + {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"}, + {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"}, + {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"}, + {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"}, +] + +[package.extras] +docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop 
(>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == 
\"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "beautifulsoup4" +version = "4.13.5" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a"}, + {file = "beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695"}, +] + +[package.dependencies] +soupsieve = ">1.2" +typing-extensions = ">=4.0.0" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bs4" +version = "0.0.2" +description = "Dummy package for Beautiful Soup (beautifulsoup4)" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "bs4-0.0.2-py2.py3-none-any.whl", hash = 
"sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"}, + {file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, + {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, +] + +[[package]] +name = "cffi" +version = "2.0.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "os_name == \"nt\" and implementation_name != \"pypy\"" +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = 
"cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = 
"cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = 
"cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = 
"cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = 
"cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = 
"cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, +] + +[package.dependencies] +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669"}, + {file = 
"charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe"}, + {file = 
"charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15"}, + {file = 
"charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c"}, + {file = 
"charset_normalizer-3.4.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-win32.whl", hash = "sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca"}, + {file = "charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a"}, + {file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"}, +] + +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false +python-versions 
= ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer (>=2.4)"] + +[[package]] +name = "dbutils" +version = "3.1.2" +description = "Database connections for multi-threaded environments." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "dbutils-3.1.2-py3-none-any.whl", hash = "sha256:0cb388a89eeecf04089aef113a7007c3fac9199e9580c8549829f954870c403a"}, + {file = "dbutils-3.1.2.tar.gz", hash = "sha256:160b5788154f1adeddc61080daff1530b4df2ba0d45af1c3bfbac76db24186b3"}, +] + +[package.extras] +docs = ["docutils"] +pg = ["PyGreSQL (>=5)"] +tests = ["pytest (>=7)", "ruff"] + +[[package]] +name = "fastapi" +version = "0.116.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565"}, + {file = "fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.48.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "flatbuffers" +version = "25.2.10" +description = "The FlatBuffers 
serialization format for Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051"}, + {file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"}, +] + +[[package]] +name = "google" +version = "3.0.0" +description = "Python bindings to the Google search engine." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"}, + {file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "google-api-core" +version = "2.25.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7"}, + {file = "google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +proto-plus = [ + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, + {version = ">=1.22.3,<2.0.0", markers = "python_version < \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] +grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status 
(>=1.49.1,<2.0.0) ; python_version >= \"3.11\""] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] + +[[package]] +name = "google-api-python-client" +version = "2.181.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_python_client-2.181.0-py3-none-any.whl", hash = "sha256:348730e3ece46434a01415f3d516d7a0885c8e624ce799f50f2d4d86c2475fb7"}, + {file = "google_api_python_client-2.181.0.tar.gz", hash = "sha256:d7060962a274a16a2c6f8fb4b1569324dbff11bfbca8eb050b88ead1dd32261c"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.0.0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-auth" +version = "2.40.3" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca"}, + {file = "google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography 
(>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2"}, + {file = "google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684"}, +] + +[package.dependencies] +google-auth = ">=2.15.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + +[[package]] +name = "greenlet" +version = "3.2.4" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +files = [ + {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, + {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, + {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, + {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, + {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, + {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, + {file = 
"greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, + {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, + {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil", "setuptools"] + +[[package]] +name = "gunicorn" +version = "23.0.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] +gevent = ["gevent (>=1.4.0)"] 
+setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httplib2" +version = "0.31.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24"}, + {file = "httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c"}, +] + +[package.dependencies] +pyparsing = ">=3.0.4,<4" + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional 
= false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "joblib" +version = "1.5.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241"}, + {file = "joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55"}, +] + +[[package]] +name = "loguru" +version = "0.7.3" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = "<4.0,>=3.5" +groups = ["main"] +files = [ + {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, + {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= 
\"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] + +[[package]] +name = "mecab-python3" +version = "1.0.10" +description = "Python wrapper for the MeCab morphological analyzer for Japanese" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "mecab_python3-1.0.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ddeeb7e40348066cbcf980dffa19bc84e087bb0fb452ce149defc11747f52f85"}, + {file = "mecab_python3-1.0.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1acb9f47108170a43549637f3f45449c7018d56e91ca5fc8ad56bbcd8288848c"}, + {file = "mecab_python3-1.0.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e487498dc7231926230944ad04e40406d23499240fd35273d8d2c4f775dcc162"}, + {file = "mecab_python3-1.0.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a2924d9ee1a7eefe0601edf16d2b63c5519e3403b319cfc9d1eda4bf978f6d9"}, + {file = "mecab_python3-1.0.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069c176c02b6bec3fdc9e00c42138dc77ef4b683908b6909808bc7528d2996bc"}, + {file = "mecab_python3-1.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:63cd0a65835257a1fcb88f25a6eaf1a8e472990a9d3f7d08300c5cccf8973931"}, + {file = "mecab_python3-1.0.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ebc8bbdb7e0c616e1467b02cadc3c7a764912dec241b31a14c90b1c1ac58afc8"}, + {file = 
"mecab_python3-1.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d66bbda223e64bf1eb320809b5d7e21fc6b045ccc14e07232d8592dd40b1a29"}, + {file = "mecab_python3-1.0.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eed9b626a82eb26e571e45832b7c03b46e250e57c70d7309aa0c28c0fb95d47"}, + {file = "mecab_python3-1.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c1123fb64fb67d29e7221a9cba36b589b795683bd94e762d87385a9633de95"}, + {file = "mecab_python3-1.0.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da59058da7459457f14382ddb6a2bb4a80176d0dcfa3eb835c53abd11e5aa97d"}, + {file = "mecab_python3-1.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:1eca068650d9f228072820ce015eb5831b9114afe6cc0f381208eaa2e1f23f0f"}, + {file = "mecab_python3-1.0.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb21b38fea3da3a3c893b6af34f9d34e4846c30f7d2f76fe58beee195963fbf3"}, + {file = "mecab_python3-1.0.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7fa96813dca31ad1517a1c5921b5620713fdefea072795ec9de31425fcf2c4e1"}, + {file = "mecab_python3-1.0.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:134e2c68a783f545bc8373601469d017a13d9b7cac46d243ec1bbfb2c94639a3"}, + {file = "mecab_python3-1.0.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d9ff1c7a7ec4f42c98d74db71bc9b1d513db4cf676a023665ae40197f2da040"}, + {file = "mecab_python3-1.0.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2953a3e53fa269f2e1b109de3a55fc7668e9e566f0340a69c2202a37f0447691"}, + {file = "mecab_python3-1.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:308cbec46e426d53bde1f97a95ea655d3e6fcababe0c444dd74c9d3f8105a179"}, + {file = "mecab_python3-1.0.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:711ee9a7ba27aa6988b580951671e5966d7b9aa16cae453d17a5e149d295941c"}, + {file = "mecab_python3-1.0.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:1cc4e90d23b57e1ea4bf0ecc57cf7cdbc432164398b67ad113256bc20ed52154"}, + {file = "mecab_python3-1.0.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4b90261aa514f29c6e05bd99717a02eeb9be3d7ea0a0be01f65ce0d86c572c68"}, + {file = "mecab_python3-1.0.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894d87d708545314359cfd1b062238c2756d8f985b4c3fe7cabdf111f533a367"}, + {file = "mecab_python3-1.0.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bd7b86dd39a068dadf06ba8dd717ec1defb2cea181c5cbc6c54b1adb6dd0d4b"}, + {file = "mecab_python3-1.0.10-cp313-cp313-win_amd64.whl", hash = "sha256:3528ef81cc4c9506ae3b273958fe2314aa1022a8db64640e631e09fd3e1af97b"}, + {file = "mecab_python3-1.0.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c7da20d1ede231645e2be96a202f15419cb508b4b21f3c466bc5848f5956af27"}, + {file = "mecab_python3-1.0.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1e385987f2ef3f617ee87bf2ca555e10c468c156c71bfcac7182202df261f4d2"}, + {file = "mecab_python3-1.0.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9666fa3e116768d81c3d20f13bb05daf8a474919312cc5239180ed6f5c318e80"}, + {file = "mecab_python3-1.0.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06f9259bd2ffb4a71e99712ea845b579674a2be7b245b88b03f28a390ab13dea"}, + {file = "mecab_python3-1.0.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8d16cf163c9fe568a42a31e99a60cdaa97d76124d04a8daa2bb2b93f18d08107"}, + {file = "mecab_python3-1.0.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82c175f1ae970b3baa3589e29f3946a1a83b76a48245ea103558abdbfb3398b1"}, + {file = "mecab_python3-1.0.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a20e0c94bb24e36ec73d1e6ae91608cd913174f8aa1d8195b218d2d77aeb3ccc"}, + {file = "mecab_python3-1.0.10-cp38-cp38-win_amd64.whl", hash = "sha256:9af3ef731dfbd1f0a97f4a91ebfea2454dc3e8fa9e42423912eb6628f2acdfec"}, + {file = 
"mecab_python3-1.0.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c72f1c4c582f7f86aa9454694719e7873f80830e300df0d71f6b38ff9c0f94ea"}, + {file = "mecab_python3-1.0.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c45dd85ee584326d23e1ec4d6d4c23ac39e88e5bc0442b4e81b178c59d1c148"}, + {file = "mecab_python3-1.0.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6845fb4bf4771018a10a6c455dc4cb3e0590c8ac55d25cddfe85138a72bbd2"}, + {file = "mecab_python3-1.0.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30f4cca5992c7c5ac3767d6e21c235d02104eb11b94b60361494509a72d92a5"}, + {file = "mecab_python3-1.0.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bd723e757d321135d38ab383639c148e20eb65468517398fff26eb89344d0b5"}, + {file = "mecab_python3-1.0.10-cp39-cp39-win_amd64.whl", hash = "sha256:f93201fa2c4d7e03b3cc25ffd52a8c4ee207db874258d5143ece8b457e22a885"}, + {file = "mecab_python3-1.0.10.tar.gz", hash = "sha256:21cd4416043e9a993fcfb986dde93e4366a07543dd95849b5ef2e50c9a9afcce"}, +] + +[package.extras] +unidic = ["unidic"] +unidic-lite = ["unidic-lite"] + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "numpy" +version = "2.3.3" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f"}, + {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125"}, + {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48"}, + {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6"}, + {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa"}, + {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30"}, + {file = "numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57"}, + {file = "numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa"}, + 
{file = "numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe"}, + {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b"}, + {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8"}, + {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20"}, + {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea"}, + {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7"}, + {file = "numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf"}, + {file = "numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb"}, + {file = "numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6"}, + {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7"}, + {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c"}, + {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93"}, + {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae"}, + {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86"}, + {file = "numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8"}, + {file = "numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf"}, + {file = "numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b"}, + {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19"}, + {file = 
"numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30"}, + {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e"}, + {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3"}, + {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea"}, + {file = "numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd"}, + {file = "numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d"}, + {file = "numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7"}, + {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a"}, + {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe"}, + {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421"}, + {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", 
hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021"}, + {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf"}, + {file = "numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0"}, + {file = "numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8"}, + {file = "numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d"}, + {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a"}, + {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54"}, + {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e"}, + {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097"}, + {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970"}, + {file = "numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5"}, + {file = 
"numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f"}, + {file = "numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db"}, + {file = "numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc"}, + {file = "numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029"}, +] + +[[package]] +name = "oauthlib" +version = "3.3.1" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, +] + +[package.extras] +rsa = ["cryptography 
(>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "onnxruntime" +version = "1.22.1" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "onnxruntime-1.22.1-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:80e7f51da1f5201c1379b8d6ef6170505cd800e40da216290f5e06be01aadf95"}, + {file = "onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89ddfdbbdaf7e3a59515dee657f6515601d55cb21a0f0f48c81aefc54ff1b73"}, + {file = "onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bddc75868bcf6f9ed76858a632f65f7b1846bdcefc6d637b1e359c2c68609964"}, + {file = "onnxruntime-1.22.1-cp310-cp310-win_amd64.whl", hash = "sha256:01e2f21b2793eb0c8642d2be3cee34cc7d96b85f45f6615e4e220424158877ce"}, + {file = "onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0"}, + {file = "onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5"}, + {file = "onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04"}, + {file = "onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03"}, + {file = "onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a"}, + {file = "onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928"}, + {file = 
"onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d"}, + {file = "onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87"}, + {file = "onnxruntime-1.22.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:d29c7d87b6cbed8fecfd09dca471832384d12a69e1ab873e5effbb94adc3e966"}, + {file = "onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:460487d83b7056ba98f1f7bac80287224c31d8149b15712b0d6f5078fcc33d0f"}, + {file = "onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0c37070268ba4e02a1a9d28560cd00cd1e94f0d4f275cbef283854f861a65fa"}, + {file = "onnxruntime-1.22.1-cp313-cp313-win_amd64.whl", hash = "sha256:70980d729145a36a05f74b573435531f55ef9503bcda81fc6c3d6b9306199982"}, + {file = "onnxruntime-1.22.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33a7980bbc4b7f446bac26c3785652fe8730ed02617d765399e89ac7d44e0f7d"}, + {file = "onnxruntime-1.22.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7e823624b015ea879d976cbef8bfaed2f7e2cc233d7506860a76dd37f8f381"}, +] + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + +[[package]] +name = "outcome" +version = "1.3.0.post0" +description = "Capture the outcome of Python function calls." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, + {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, +] + +[package.dependencies] +attrs = ">=19.2.0" + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, + {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "poetry-core" +version = "2.1.3" +description = "Poetry PEP 517 Build Backend" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "poetry_core-2.1.3-py3-none-any.whl", hash = "sha256:2c704f05016698a54ca1d327f46ce2426d72eaca6ff614132c8477c292266771"}, + {file = "poetry_core-2.1.3.tar.gz", hash = "sha256:0522a015477ed622c89aad56a477a57813cace0c8e7ff2a2906b7ef4a2e296a4"}, +] + +[[package]] +name = "proto-plus" +version = "1.26.1" +description = "Beautiful, Pythonic protocol buffers" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, + {file = "proto_plus-1.26.1.tar.gz", hash = 
"sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<7.0.0" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "6.32.1" +description = "" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "protobuf-6.32.1-cp310-abi3-win32.whl", hash = "sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085"}, + {file = "protobuf-6.32.1-cp310-abi3-win_amd64.whl", hash = "sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1"}, + {file = "protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281"}, + {file = "protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4"}, + {file = "protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710"}, + {file = "protobuf-6.32.1-cp39-cp39-win32.whl", hash = "sha256:68ff170bac18c8178f130d1ccb94700cf72852298e016a2443bdb9502279e5f1"}, + {file = "protobuf-6.32.1-cp39-cp39-win_amd64.whl", hash = "sha256:d0975d0b2f3e6957111aa3935d08a0eb7e006b1505d825f862a1fffc8348e122"}, + {file = "protobuf-6.32.1-py3-none-any.whl", hash = "sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346"}, + {file = "protobuf-6.32.1.tar.gz", hash = "sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d"}, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = 
"sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = 
"sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = 
"sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = 
"sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + +[[package]] +name = "pycparser" +version = 
"2.23" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "os_name == \"nt\" and implementation_name != \"pypy\" and implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, 
+ {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = 
"sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = 
"sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, 
+ {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = 
"pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.10.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796"}, + 
{file = "pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" + +[package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pymysql" +version = "1.1.2" +description = "Pure Python MySQL Driver" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9"}, + {file = "pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03"}, +] + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = 
"sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyperclip" +version = "1.9.0" +description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"}, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +description = "A python implementation of GNU readline." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, +] + +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + +[[package]] +name = "pysocks" +version = "1.7.1" +description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, + {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, + {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, +] + +[[package]] +name = "pytest" +version = "8.4.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc"}, + {file = "python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "scikit-learn" +version = "1.7.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "scikit_learn-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b33579c10a3081d076ab403df4a4190da4f4432d443521674637677dc91e61f"}, + {file = 
"scikit_learn-1.7.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:36749fb62b3d961b1ce4fedf08fa57a1986cd409eff2d783bca5d4b9b5fce51c"}, + {file = "scikit_learn-1.7.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7a58814265dfc52b3295b1900cfb5701589d30a8bb026c7540f1e9d3499d5ec8"}, + {file = "scikit_learn-1.7.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a847fea807e278f821a0406ca01e387f97653e284ecbd9750e3ee7c90347f18"}, + {file = "scikit_learn-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:ca250e6836d10e6f402436d6463d6c0e4d8e0234cfb6a9a47835bd392b852ce5"}, + {file = "scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e"}, + {file = "scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1"}, + {file = "scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d"}, + {file = "scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1"}, + {file = "scikit_learn-1.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:9b7ed8d58725030568523e937c43e56bc01cadb478fc43c042a9aca1dacb3ba1"}, + {file = "scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96"}, + {file = "scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476"}, + {file = "scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b"}, + {file = "scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44"}, + {file = "scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290"}, + {file = "scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7"}, + {file = "scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe"}, + {file = "scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f"}, + {file = "scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0"}, + {file = "scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c"}, + {file = "scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973"}, + {file = "scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33"}, + {file = 
"scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615"}, + {file = "scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106"}, + {file = "scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61"}, + {file = "scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8"}, + {file = "scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.22.0" +scipy = ">=1.8.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.5.0)", "memory_profiler (>=0.57.0)", "pandas (>=1.4.0)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.17.1)", "numpy (>=1.22.0)", "scipy (>=1.8.0)"] +docs = ["Pillow (>=8.4.0)", "matplotlib (>=3.5.0)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.4.0)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.19.0)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.17.1)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)", "towncrier (>=24.8.0)"] +examples = ["matplotlib (>=3.5.0)", "pandas (>=1.4.0)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.19.0)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.22.0)", "scipy (>=1.8.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==3.0.1)"] +tests = ["matplotlib (>=3.5.0)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas 
(>=1.4.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.2.1)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.11.7)", "scikit-image (>=0.19.0)"] + +[[package]] +name = "scipy" +version = "1.16.2" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "scipy-1.16.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6ab88ea43a57da1af33292ebd04b417e8e2eaf9d5aa05700be8d6e1b6501cd92"}, + {file = "scipy-1.16.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c95e96c7305c96ede73a7389f46ccd6c659c4da5ef1b2789466baeaed3622b6e"}, + {file = "scipy-1.16.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:87eb178db04ece7c698220d523c170125dbffebb7af0345e66c3554f6f60c173"}, + {file = "scipy-1.16.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:4e409eac067dcee96a57fbcf424c13f428037827ec7ee3cb671ff525ca4fc34d"}, + {file = "scipy-1.16.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e574be127bb760f0dad24ff6e217c80213d153058372362ccb9555a10fc5e8d2"}, + {file = "scipy-1.16.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f5db5ba6188d698ba7abab982ad6973265b74bb40a1efe1821b58c87f73892b9"}, + {file = "scipy-1.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec6e74c4e884104ae006d34110677bfe0098203a3fec2f3faf349f4cb05165e3"}, + {file = "scipy-1.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:912f46667d2d3834bc3d57361f854226475f695eb08c08a904aadb1c936b6a88"}, + {file = "scipy-1.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:91e9e8a37befa5a69e9cacbe0bcb79ae5afb4a0b130fd6db6ee6cc0d491695fa"}, + {file = "scipy-1.16.2-cp311-cp311-win_arm64.whl", hash = "sha256:f3bf75a6dcecab62afde4d1f973f1692be013110cad5338007927db8da73249c"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = 
"sha256:89d6c100fa5c48472047632e06f0876b3c4931aac1f4291afc81a3644316bb0d"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ca748936cd579d3f01928b30a17dc474550b01272d8046e3e1ee593f23620371"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:fac4f8ce2ddb40e2e3d0f7ec36d2a1e7f92559a2471e59aec37bd8d9de01fec0"}, + {file = "scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:033570f1dcefd79547a88e18bccacff025c8c647a330381064f561d43b821232"}, + {file = "scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ea3421209bf00c8a5ef2227de496601087d8f638a2363ee09af059bd70976dc1"}, + {file = "scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f66bd07ba6f84cd4a380b41d1bf3c59ea488b590a2ff96744845163309ee8e2f"}, + {file = "scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e9feab931bd2aea4a23388c962df6468af3d808ddf2d40f94a81c5dc38f32ef"}, + {file = "scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03dfc75e52f72cf23ec2ced468645321407faad8f0fe7b1f5b49264adbc29cb1"}, + {file = "scipy-1.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:0ce54e07bbb394b417457409a64fd015be623f36e330ac49306433ffe04bc97e"}, + {file = "scipy-1.16.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a8ffaa4ac0df81a0b94577b18ee079f13fecdb924df3328fc44a7dc5ac46851"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:84f7bf944b43e20b8a894f5fe593976926744f6c185bacfcbdfbb62736b5cc70"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5c39026d12edc826a1ef2ad35ad1e6d7f087f934bb868fc43fa3049c8b8508f9"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e52729ffd45b68777c5319560014d6fd251294200625d9d70fd8626516fc49f5"}, + {file = "scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:024dd4a118cccec09ca3209b7e8e614931a6ffb804b2a601839499cb88bdf925"}, + {file = 
"scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a5dc7ee9c33019973a470556081b0fd3c9f4c44019191039f9769183141a4d9"}, + {file = "scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c2275ff105e508942f99d4e3bc56b6ef5e4b3c0af970386ca56b777608ce95b7"}, + {file = "scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af80196eaa84f033e48444d2e0786ec47d328ba00c71e4299b602235ffef9acb"}, + {file = "scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9fb1eb735fe3d6ed1f89918224e3385fbf6f9e23757cacc35f9c78d3b712dd6e"}, + {file = "scipy-1.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:fda714cf45ba43c9d3bae8f2585c777f64e3f89a2e073b668b32ede412d8f52c"}, + {file = "scipy-1.16.2-cp313-cp313-win_arm64.whl", hash = "sha256:2f5350da923ccfd0b00e07c3e5cfb316c1c0d6c1d864c07a72d092e9f20db104"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:53d8d2ee29b925344c13bda64ab51785f016b1b9617849dac10897f0701b20c1"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:9e05e33657efb4c6a9d23bd8300101536abd99c85cca82da0bffff8d8764d08a"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:7fe65b36036357003b3ef9d37547abeefaa353b237e989c21027b8ed62b12d4f"}, + {file = "scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6406d2ac6d40b861cccf57f49592f9779071655e9f75cd4f977fa0bdd09cb2e4"}, + {file = "scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff4dc42bd321991fbf611c23fc35912d690f731c9914bf3af8f417e64aca0f21"}, + {file = "scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:654324826654d4d9133e10675325708fb954bc84dae6e9ad0a52e75c6b1a01d7"}, + {file = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63870a84cd15c44e65220eaed2dac0e8f8b26bbb991456a033c1d9abfe8a94f8"}, + {file = 
"scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:fa01f0f6a3050fa6a9771a95d5faccc8e2f5a92b4a2e5440a0fa7264a2398472"}, + {file = "scipy-1.16.2-cp313-cp313t-win_amd64.whl", hash = "sha256:116296e89fba96f76353a8579820c2512f6e55835d3fad7780fece04367de351"}, + {file = "scipy-1.16.2-cp313-cp313t-win_arm64.whl", hash = "sha256:98e22834650be81d42982360382b43b17f7ba95e0e6993e2a4f5b9ad9283a94d"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:567e77755019bb7461513c87f02bb73fb65b11f049aaaa8ca17cfaa5a5c45d77"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:17d9bb346194e8967296621208fcdfd39b55498ef7d2f376884d5ac47cec1a70"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:0a17541827a9b78b777d33b623a6dcfe2ef4a25806204d08ead0768f4e529a88"}, + {file = "scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:d7d4c6ba016ffc0f9568d012f5f1eb77ddd99412aea121e6fa8b4c3b7cbad91f"}, + {file = "scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9702c4c023227785c779cba2e1d6f7635dbb5b2e0936cdd3a4ecb98d78fd41eb"}, + {file = "scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1cdf0ac28948d225decdefcc45ad7dd91716c29ab56ef32f8e0d50657dffcc7"}, + {file = "scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:70327d6aa572a17c2941cdfb20673f82e536e91850a2e4cb0c5b858b690e1548"}, + {file = "scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5221c0b2a4b58aa7c4ed0387d360fd90ee9086d383bb34d9f2789fafddc8a936"}, + {file = "scipy-1.16.2-cp314-cp314-win_amd64.whl", hash = "sha256:f5a85d7b2b708025af08f060a496dd261055b617d776fc05a1a1cc69e09fe9ff"}, + {file = "scipy-1.16.2-cp314-cp314-win_arm64.whl", hash = "sha256:2cc73a33305b4b24556957d5857d6253ce1e2dcd67fa0ff46d87d1670b3e1e1d"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl", hash = 
"sha256:9ea2a3fed83065d77367775d689401a703d0f697420719ee10c0780bcab594d8"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7280d926f11ca945c3ef92ba960fa924e1465f8d07ce3a9923080363390624c4"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:8afae1756f6a1fe04636407ef7dbece33d826a5d462b74f3d0eb82deabefd831"}, + {file = "scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:5c66511f29aa8d233388e7416a3f20d5cae7a2744d5cee2ecd38c081f4e861b3"}, + {file = "scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efe6305aeaa0e96b0ccca5ff647a43737d9a092064a3894e46c414db84bc54ac"}, + {file = "scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f3a337d9ae06a1e8d655ee9d8ecb835ea5ddcdcbd8d23012afa055ab014f374"}, + {file = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bab3605795d269067d8ce78a910220262711b753de8913d3deeaedb5dded3bb6"}, + {file = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b0348d8ddb55be2a844c518cd8cc8deeeb8aeba707cf834db5758fc89b476a2c"}, + {file = "scipy-1.16.2-cp314-cp314t-win_amd64.whl", hash = "sha256:26284797e38b8a75e14ea6631d29bda11e76ceaa6ddb6fdebbfe4c4d90faf2f9"}, + {file = "scipy-1.16.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779"}, + {file = "scipy-1.16.2.tar.gz", hash = "sha256:af029b153d243a80afb6eabe40b0a07f8e35c9adc269c019f364ad747f826a6b"}, +] + +[package.dependencies] +numpy = ">=1.25.2,<2.6" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", 
"sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest (>=8.0.0)", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "selenium" +version = "4.35.0" +description = "Official Python bindings for Selenium WebDriver" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "selenium-4.35.0-py3-none-any.whl", hash = "sha256:90bb6c6091fa55805785cf1660fa1e2176220475ccdb466190f654ef8eef6114"}, + {file = "selenium-4.35.0.tar.gz", hash = "sha256:83937a538afb40ef01e384c1405c0863fa184c26c759d34a1ebbe7b925d3481c"}, +] + +[package.dependencies] +certifi = ">=2025.6.15" +trio = ">=0.30.0,<0.31.0" +trio-websocket = ">=0.12.2,<0.13.0" +typing_extensions = ">=4.14.0,<4.15.0" +urllib3 = {version = ">=2.5.0,<3.0", extras = ["socks"]} +websocket-client = ">=1.8.0,<1.9.0" + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + +[[package]] +name = "soupsieve" +version 
= "2.8" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c"}, + {file = "soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.43" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07097c0a1886c150ef2adba2ff7437e84d40c0f7dcb44a2c2b9c905ccfc6361c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cdeff998cb294896a34e5b2f00e383e7c5c4ef3b4bfa375d9104723f15186443"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:bcf0724a62a5670e5718957e05c56ec2d6850267ea859f8ad2481838f889b42c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win32.whl", hash = "sha256:c697575d0e2b0a5f0433f679bda22f63873821d991e95a90e9e52aae517b2e32"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win_amd64.whl", hash = "sha256:d34c0f6dbefd2e816e8f341d0df7d4763d382e3f452423e752ffd1e213da2512"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154"}, + {file = 
"sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = 
"sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9"}, + {file = 
"sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e6aeb2e0932f32950cf56a8b4813cb15ff792fc0c9b3752eaf067cfe298496a"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f964a05356f4bca4112e6334ed7c208174511bd56e6b8fc86dad4d024d4185"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46293c39252f93ea0910aababa8752ad628bcce3a10d3f260648dd472256983f"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:136063a68644eca9339d02e6693932116f6a8591ac013b0014479a1de664e40a"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6e2bf13d9256398d037fef09fd8bf9b0bf77876e22647d10761d35593b9ac547"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:44337823462291f17f994d64282a71c51d738fc9ef561bf265f1d0fd9116a782"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win32.whl", hash = "sha256:13194276e69bb2af56198fef7909d48fd34820de01d9c92711a5fa45497cc7ed"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win_amd64.whl", hash = "sha256:334f41fa28de9f9be4b78445e68530da3c5fa054c907176460c81494f4ae1f5e"}, + {file = 
"sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b"}, + {file = "sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc"}, + {file = "sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417"}, +] + +[package.dependencies] +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions 
(!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "starlette" +version = "0.47.3" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"}, + {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "sympy" +version = "1.14.0" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, + {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, +] + +[package.dependencies] +mpmath = ">=1.1.0,<1.4" + 
+[package.extras] +dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"}, + {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, +] + +[[package]] +name = "trio" +version = "0.30.0" +description = "A friendly Python library for async concurrency and I/O" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "trio-0.30.0-py3-none-any.whl", hash = "sha256:3bf4f06b8decf8d3cf00af85f40a89824669e2d033bb32469d34840edcfc22a5"}, + {file = "trio-0.30.0.tar.gz", hash = "sha256:0781c857c0c81f8f51e0089929a26b5bb63d57f927728a5586f7e36171f064df"}, +] + +[package.dependencies] +attrs = ">=23.2.0" +cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} +idna = "*" +outcome = "*" +sniffio = ">=1.3.0" +sortedcontainers = "*" + +[[package]] +name = "trio-websocket" +version = "0.12.2" +description = "WebSocket library for Trio" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6"}, + {file = "trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae"}, +] + +[package.dependencies] +outcome = ">=1.2.0" +trio = ">=0.11" +wsproto = ">=0.14" + +[[package]] +name = "typing-extensions" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = 
"sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "uritemplate" +version = "4.2.0" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686"}, + {file = "uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e"}, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, +] + +[package.dependencies] +pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""} + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.35.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a"}, + {file = "uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] 
+ +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "win32-setctime" +version = "1.2.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, + {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, +] + +[package.extras] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[metadata] +lock-version = "2.1" +python-versions = ">=3.11,<3.14" +content-hash = "9ca2813b1931948bd0600aa974faba9311a7cf00ea632ea4db5f9fcc80ebc518" From 000f524bb4ea15cee222e47e777ac1523832f77e Mon Sep 17 00:00:00 2001 From: thkim7 Date: Fri, 12 Sep 2025 10:04:34 +0900 Subject: [PATCH 06/57] =?UTF-8?q?chore:=20poetry=20run=20black=20&=20ci-py?= =?UTF-8?q?thon=20=EC=9B=90=EC=83=81=20=EB=B3=B5=EA=B5=AC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-python.yml | 2 -- apps/pre-processing-service/app/service/similarity_service.py | 2 +- apps/pre-processing-service/app/utils/similarity_analyzer.py | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) 
diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index 54d1ab1a..ad50c51b 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -2,8 +2,6 @@ name: CI (Python/FastAPI) on: push: - branches: - - feature/onnx tags: - 'pre-processing-v*' pull_request: diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 23c34742..dbd2b762 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -174,4 +174,4 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.error( f"유사도 분석 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" ) - raise InvalidItemDataException() \ No newline at end of file + raise InvalidItemDataException() diff --git a/apps/pre-processing-service/app/utils/similarity_analyzer.py b/apps/pre-processing-service/app/utils/similarity_analyzer.py index b847a10f..d4e7c0c5 100644 --- a/apps/pre-processing-service/app/utils/similarity_analyzer.py +++ b/apps/pre-processing-service/app/utils/similarity_analyzer.py @@ -90,4 +90,4 @@ def analyze_similarity_batch( return results except Exception as e: logger.error(f"배치 유사도 분석 실패: {e}") - raise \ No newline at end of file + raise From 78c43d8261f26d3f74df0b35a610501093caf62e Mon Sep 17 00:00:00 2001 From: thkim7 Date: Fri, 12 Sep 2025 10:06:17 +0900 Subject: [PATCH 07/57] =?UTF-8?q?chore:=20ci-python=20=EB=A8=B8=EC=A7=80?= =?UTF-8?q?=20=ED=85=8C=EC=8A=A4=ED=8A=B8=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-python.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index ad50c51b..54d1ab1a 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -2,6 
+2,8 @@ name: CI (Python/FastAPI) on: push: + branches: + - feature/onnx tags: - 'pre-processing-v*' pull_request: From be53bb4ed9b367fbafd71bc0ef5f99059dc336be Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Sat, 13 Sep 2025 12:03:17 +0900 Subject: [PATCH 08/57] =?UTF-8?q?typeHandler=EA=B0=80=20=EB=93=B1=EB=A1=9D?= =?UTF-8?q?=EB=90=98=EC=A7=80=20=EC=95=8A=EB=8A=94=20=EB=B2=84=EA=B7=B8=20?= =?UTF-8?q?(#88)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/user-service/src/main/resources/application.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index d6f68b0e..7ede99ae 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -10,7 +10,7 @@ spring: mybatis: # Mapper XML 파일 위치 mapper-locations: classpath:mapper/**/*.xml - type-handlers-package: site.icebang.config.mybatis.typehandler + type-handlers-package: site.icebang.global.config.mybatis.typehandler # 외부 API 연동을 위한 설정 섹션 api: From 5579b7c0e612dedf479b05a8bea4a719b9ce7f3c Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Sat, 13 Sep 2025 13:13:55 +0900 Subject: [PATCH 09/57] =?UTF-8?q?=EA=B8=B0=EB=B3=B8=EC=A0=81=EC=9D=B8=20gl?= =?UTF-8?q?obal=20exception=20handling=20(#90)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: 기본적인 global exception handling * chore: Security exception handling * feat: Duplicate Exception handling --- .../exception/DuplicateDataException.java | 25 +++++++++ .../domain/auth/service/AuthService.java | 3 +- .../config/security/SecurityConfig.java | 11 ++++ .../exception/GlobalExceptionHandler.java | 54 +++++++++++++++++++ .../exception/RestAccessDeniedHandler.java | 33 ++++++++++++ .../RestAuthenticationEntryPoint.java | 34 ++++++++++++ .../scenario/UserRegistrationFlowE2eTest.java | 49 
+---------------- 7 files changed, 160 insertions(+), 49 deletions(-) create mode 100644 apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java create mode 100644 apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java create mode 100644 apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java create mode 100644 apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java diff --git a/apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java b/apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java new file mode 100644 index 00000000..e673ab86 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/common/exception/DuplicateDataException.java @@ -0,0 +1,25 @@ +package site.icebang.common.exception; + +public class DuplicateDataException extends RuntimeException { + + public DuplicateDataException() { + super(); + } + + public DuplicateDataException(String message) { + super(message); + } + + public DuplicateDataException(String message, Throwable cause) { + super(message, cause); + } + + public DuplicateDataException(Throwable cause) { + super(cause); + } + + protected DuplicateDataException( + String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java index 091861b2..25a5bd42 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/service/AuthService.java @@ -6,6 +6,7 @@ import lombok.RequiredArgsConstructor; +import 
site.icebang.common.exception.DuplicateDataException; import site.icebang.common.utils.RandomPasswordGenerator; import site.icebang.domain.auth.dto.RegisterDto; import site.icebang.domain.auth.mapper.AuthMapper; @@ -23,7 +24,7 @@ public class AuthService { public void registerUser(RegisterDto registerDto) { if (authMapper.existsByEmail(registerDto.getEmail())) { - throw new IllegalArgumentException("이미 가입된 이메일입니다."); + throw new DuplicateDataException("이미 가입된 이메일입니다."); } String randomPassword = passwordGenerator.generate(); String hashedPassword = passwordEncoder.encode(randomPassword); diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java index aba3ee3c..c915867d 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java @@ -19,16 +19,23 @@ import org.springframework.web.cors.UrlBasedCorsConfigurationSource; import org.springframework.web.filter.CorsFilter; +import com.fasterxml.jackson.databind.ObjectMapper; + import lombok.RequiredArgsConstructor; import site.icebang.domain.auth.service.AuthCredentialAdapter; import site.icebang.global.config.security.endpoints.SecurityEndpoints; +import site.icebang.global.handler.exception.RestAccessDeniedHandler; +import site.icebang.global.handler.exception.RestAuthenticationEntryPoint; @Configuration @RequiredArgsConstructor public class SecurityConfig { private final Environment environment; private final AuthCredentialAdapter userDetailsService; + private final ObjectMapper objectMapper; + private final RestAuthenticationEntryPoint restAuthenticationEntryPoint; + private final RestAccessDeniedHandler restAccessDeniedHandler; @Bean public AuthenticationProvider authenticationProvider() { @@ -97,6 +104,10 @@ public SecurityFilterChain 
filterChain(HttpSecurity http) throws Exception { .logout( logout -> logout.logoutUrl("/auth/logout").logoutSuccessUrl("/auth/login").permitAll()) .csrf(AbstractHttpConfigurer::disable) + .exceptionHandling( + ex -> + ex.authenticationEntryPoint(restAuthenticationEntryPoint) + .accessDeniedHandler(restAccessDeniedHandler)) .build(); } diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java new file mode 100644 index 00000000..6923f455 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java @@ -0,0 +1,54 @@ +package site.icebang.global.handler.exception; + +import org.springframework.http.HttpStatus; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.security.core.AuthenticationException; +import org.springframework.web.bind.MethodArgumentNotValidException; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestControllerAdvice; +import org.springframework.web.servlet.resource.NoResourceFoundException; + +import site.icebang.common.dto.ApiResponse; +import site.icebang.common.exception.DuplicateDataException; + +@RestControllerAdvice +public class GlobalExceptionHandler { + @ExceptionHandler(MethodArgumentNotValidException.class) + @ResponseStatus(HttpStatus.BAD_REQUEST) + public ApiResponse handleValidation(MethodArgumentNotValidException ex) { + String detail = ex.getBindingResult().toString(); + return ApiResponse.error("Validation failed: " + detail, HttpStatus.BAD_REQUEST); + } + + @ExceptionHandler(Exception.class) + @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR) + public ApiResponse handleGeneric(Exception ex) { + return ApiResponse.error( + "Internal error: " + 
ex.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR); + } + + @ExceptionHandler(NoResourceFoundException.class) + @ResponseStatus(HttpStatus.NOT_FOUND) + public ApiResponse handleNotFound(NoResourceFoundException ex) { + return ApiResponse.error("Notfound: " + ex.getMessage(), HttpStatus.NOT_FOUND); + } + + @ExceptionHandler(AuthenticationException.class) + @ResponseStatus(HttpStatus.UNAUTHORIZED) + public ApiResponse handleAuthentication(AuthenticationException ex) { + return ApiResponse.error("Authentication failed: " + ex.getMessage(), HttpStatus.UNAUTHORIZED); + } + + @ExceptionHandler(AccessDeniedException.class) + @ResponseStatus(HttpStatus.FORBIDDEN) + public ApiResponse handleAccessDenied(AccessDeniedException ex) { + return ApiResponse.error("Access denied: " + ex.getMessage(), HttpStatus.FORBIDDEN); + } + + @ExceptionHandler(DuplicateDataException.class) + @ResponseStatus(HttpStatus.CONFLICT) + public ApiResponse handleDuplicateData(DuplicateDataException ex) { + return ApiResponse.error("Duplicate: " + ex.getMessage(), HttpStatus.CONFLICT); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java new file mode 100644 index 00000000..efeffde1 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAccessDeniedHandler.java @@ -0,0 +1,33 @@ +package site.icebang.global.handler.exception; + +import java.io.IOException; + +import org.springframework.http.HttpStatus; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.security.web.access.AccessDeniedHandler; +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import lombok.RequiredArgsConstructor; + +import 
site.icebang.common.dto.ApiResponse; + +@Component +@RequiredArgsConstructor +public class RestAccessDeniedHandler implements AccessDeniedHandler { + private final ObjectMapper objectMapper; + + @Override + public void handle( + HttpServletRequest request, HttpServletResponse response, AccessDeniedException ex) + throws IOException { + ApiResponse body = ApiResponse.error("Access denied", HttpStatus.FORBIDDEN); + + response.setStatus(HttpServletResponse.SC_FORBIDDEN); + response.setContentType("application/json;charset=UTF-8"); + response.getWriter().write(objectMapper.writeValueAsString(body)); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java new file mode 100644 index 00000000..b7c50d76 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/RestAuthenticationEntryPoint.java @@ -0,0 +1,34 @@ +package site.icebang.global.handler.exception; + +import java.io.IOException; + +import org.springframework.http.HttpStatus; +import org.springframework.security.core.AuthenticationException; +import org.springframework.security.web.AuthenticationEntryPoint; +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import lombok.RequiredArgsConstructor; + +import site.icebang.common.dto.ApiResponse; + +@Component +@RequiredArgsConstructor +public class RestAuthenticationEntryPoint implements AuthenticationEntryPoint { + private final ObjectMapper objectMapper; + + @Override + public void commence( + HttpServletRequest request, HttpServletResponse response, AuthenticationException ex) + throws IOException { + ApiResponse body = + ApiResponse.error("Authentication required", HttpStatus.UNAUTHORIZED); + + 
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); + response.setContentType("application/json;charset=UTF-8"); + response.getWriter().write(objectMapper.writeValueAsString(body)); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java index a873d2d5..1cf10e95 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -7,7 +7,6 @@ import java.util.HashMap; import java.util.Map; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.springframework.http.*; @@ -116,52 +115,6 @@ void completeUserRegistrationFlow() throws Exception { logCompletion("ERP 사용자 등록 플로우"); } - @Disabled - @DisplayName("로그인 없이 리소스 접근 시 모든 요청 차단") - void accessResourcesWithoutLogin_shouldFailForAll() { - logStep(1, "인증 없이 조직 목록 조회 시도"); - - // 1. 로그인 없이 조직 목록 조회 시도 - ResponseEntity orgResponse = - restTemplate.getForEntity(getV0ApiUrl("/organizations"), Map.class); - - assertThat(orgResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); - logSuccess("미인증 조직 조회 차단 확인"); - - logStep(2, "인증 없이 조직 옵션 조회 시도"); - - // 2. 로그인 없이 조직 옵션 조회 시도 - ResponseEntity optResponse = - restTemplate.getForEntity(getV0ApiUrl("/organizations/1/options"), Map.class); - - assertThat(optResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); - logSuccess("미인증 옵션 조회 차단 확인"); - - logStep(3, "인증 없이 회원가입 시도"); - - // 3. 
로그인 없이 회원가입 시도 - Map registerRequest = new HashMap<>(); - registerRequest.put("name", "테스트사용자"); - registerRequest.put("email", "test@example.com"); - registerRequest.put("orgId", 1); - registerRequest.put("deptId", 2); - registerRequest.put("positionId", 5); - registerRequest.put("roleIds", Arrays.asList(6)); - - HttpHeaders headers = new HttpHeaders(); - headers.setContentType(MediaType.APPLICATION_JSON); - - HttpEntity> entity = new HttpEntity<>(registerRequest, headers); - - ResponseEntity regResponse = - restTemplate.postForEntity(getV0ApiUrl("/auth/register"), entity, Map.class); - - assertThat(regResponse.getStatusCode()).isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); - logSuccess("미인증 회원가입 차단 확인"); - - logCompletion("ERP 보안 검증"); - } - @Test @DisplayName("잘못된 자격증명으로 로그인 시도 시 실패") void loginWithInvalidCredentials_shouldFail() { @@ -200,7 +153,7 @@ void loginWithInvalidCredentials_shouldFail() { } @SuppressWarnings("unchecked") - @Disabled + @Test @DisplayName("중복 이메일로 사용자 등록 시도 시 실패") void register_withDuplicateEmail_shouldFail() { // 선행 조건: 관리자 로그인 From 34c22dcc9619cda297bdbd79c04f255d329c4333 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Sat, 13 Sep 2025 13:15:00 +0900 Subject: [PATCH 10/57] =?UTF-8?q?Caddyfile=20copy=20=EC=B6=94=EA=B0=80=20(?= =?UTF-8?q?#89)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/deploy-java.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/deploy-java.yml b/.github/workflows/deploy-java.yml index 9c876f2f..eb2865d6 100644 --- a/.github/workflows/deploy-java.yml +++ b/.github/workflows/deploy-java.yml @@ -52,6 +52,16 @@ jobs: target: "~/app/docker/production/" overwrite: true + - name: Copy Caddyfile to EC2 + uses: appleboy/scp-action@v0.1.7 + with: + host: ${{ secrets.SERVER_HOST }} + username: ubuntu + key: ${{ secrets.SERVER_SSH_KEY }} + source: "docker/production/Caddyfile" + target: "~/app/docker/production/" + 
overwrite: true + - name: Deploy on EC2 uses: appleboy/ssh-action@v1.0.3 with: From 1f3892664fddd9ca3fd561e368a0c429bd758d4b Mon Sep 17 00:00:00 2001 From: kakusia Date: Sun, 14 Sep 2025 13:36:00 +0900 Subject: [PATCH 11/57] =?UTF-8?q?refactor:=20=ED=95=84=EC=9A=94=EC=97=86?= =?UTF-8?q?=EB=8A=94=20API=20url=20=EC=82=AD=EC=A0=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/pre-processing-service/app/api/endpoints/blog.py | 5 ----- apps/pre-processing-service/app/api/endpoints/keywords.py | 8 -------- apps/pre-processing-service/app/api/endpoints/product.py | 8 -------- apps/pre-processing-service/app/api/endpoints/test.py | 5 ----- 4 files changed, 26 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 04ae0b14..bdb70557 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -9,11 +9,6 @@ router = APIRouter() -@router.get("/", summary="블로그 API 상태 확인") -async def root(): - return {"message": "blog API"} - - @router.post( "/rag/create", response_model=ResponseBlogCreate, diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index 2b407d6d..92c8a66b 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -6,14 +6,6 @@ router = APIRouter() -@router.get("/", summary="키워드 API 상태 확인") -async def root(): - """ - 키워드 API가 정상 동작하는지 확인 - """ - return {"message": "keyword API"} - - @router.post( "/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색" ) diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index ceb55c9d..95b983e4 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ 
b/apps/pre-processing-service/app/api/endpoints/product.py @@ -16,14 +16,6 @@ router = APIRouter() -@router.get("/", summary="상품 API 상태 확인") -async def root(): - """ - 상품 API 서버 상태 확인용 엔드포인트 - """ - return {"message": "product API"} - - @router.post("/search", response_model=ResponseSadaguSearch, summary="상품 검색") async def search(request: RequestSadaguSearch): """ diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py index e26bd203..9e17a7c4 100644 --- a/apps/pre-processing-service/app/api/endpoints/test.py +++ b/apps/pre-processing-service/app/api/endpoints/test.py @@ -21,11 +21,6 @@ router = APIRouter() -@router.get("/") -async def root(): - return {"message": "테스트 API"} - - @router.get("/hello/{name}", tags=["hello"]) # @log_api_call async def say_hello(name: str): From d60939fd94a3406122431008ae1317dc51e94df3 Mon Sep 17 00:00:00 2001 From: kakusia Date: Sun, 14 Sep 2025 13:37:07 +0900 Subject: [PATCH 12/57] =?UTF-8?q?refactor:=202=EC=B0=A8=20=EB=A7=88?= =?UTF-8?q?=EC=9D=BC=EC=8A=A4=ED=86=A4=EC=9D=84=EC=9C=84=ED=95=9C=20?= =?UTF-8?q?=EA=B3=B5=ED=86=B5=20request=EB=B0=8F=20response=20=EC=A3=BC?= =?UTF-8?q?=EC=84=9D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/model/schemas.py | 38 ++++++++++--------- .../app/service/match_service.py | 17 +++++---- .../app/service/search_service.py | 15 ++++---- 3 files changed, 37 insertions(+), 33 deletions(-) diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index 61720cb6..52775416 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -5,29 +5,31 @@ # 기본 요청 class RequestBase(BaseModel): - job_id: int = Field( - ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" - ) - schedule_id: int = Field( - ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" - ) - schedule_his_id: 
Optional[int] = Field( - None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" - ) + # job_id: int = Field( + # ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" + # ) + # schedule_id: int = Field( + # ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" + # ) + # schedule_his_id: Optional[int] = Field( + # None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" + # ) + pass # 기본 응답 class ResponseBase(BaseModel): - job_id: int = Field( - ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" - ) - schedule_id: int = Field( - ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" - ) - schedule_his_id: Optional[int] = Field( - None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" - ) + # job_id: int = Field( + # ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" + # ) + # schedule_id: int = Field( + # ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" + # ) + # schedule_his_id: Optional[int] = Field( + # None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" + # ) status: str = Field(..., title="상태", description="요청 처리 상태") + pass # 네이버 키워드 추출 diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py index 5816957a..9f340683 100644 --- a/apps/pre-processing-service/app/service/match_service.py +++ b/apps/pre-processing-service/app/service/match_service.py @@ -16,15 +16,16 @@ def match_products(self, request: RequestSadaguMatch) -> dict: products = request.search_results logger.info( - f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}" + # f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}" + f"keyword='{keyword}'" ) if not products: logger.warning(f"매칭할 상품이 없음: keyword='{keyword}'") return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": 
request.schedule_his_id, + # "job_id": request.job_id, + # "schedule_id": request.schedule_id, + # "schedule_his_id": request.schedule_his_id, "keyword": keyword, "matched_products": [], "status": "success", @@ -80,9 +81,9 @@ def match_products(self, request: RequestSadaguMatch) -> dict: ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, + # "job_id": request.job_id, + # "schedule_id": request.schedule_id, + # "schedule_his_id": request.schedule_his_id, "keyword": keyword, "matched_products": matched_products, "status": "success", @@ -90,6 +91,6 @@ def match_products(self, request: RequestSadaguMatch) -> dict: except Exception as e: logger.error( - f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + # f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" ) raise InvalidItemDataException() diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index 6fb09c0f..4cb1bf99 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -17,7 +17,8 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: try: logger.info( - f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'" + # f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'" + f"keyword='{keyword}'" ) # Selenium 또는 httpx로 상품 검색 @@ -29,9 +30,9 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: if not search_results: logger.warning(f"검색 결과가 없습니다: keyword='{keyword}'") return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, + # "job_id": request.job_id, + # "schedule_id": request.schedule_id, + # "schedule_his_id": request.schedule_his_id, "keyword": keyword, 
"search_results": [], "status": "success", @@ -90,9 +91,9 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, + # "job_id": request.job_id, + # "schedule_id": request.schedule_id, + # "schedule_his_id": request.schedule_his_id, "keyword": keyword, "search_results": enriched_results, "status": "success", From e8c83c3380e2aa09b1df4c840226413c17744760 Mon Sep 17 00:00:00 2001 From: kakusia Date: Sun, 14 Sep 2025 14:02:29 +0900 Subject: [PATCH 13/57] =?UTF-8?q?refactor:=20-=202=EC=B0=A8=20=EB=A7=88?= =?UTF-8?q?=EC=9D=BC=EC=8A=A4=ED=86=A4=EC=9D=84=EC=9C=84=ED=95=9C=20?= =?UTF-8?q?=EA=B3=B5=ED=86=B5=20request=EB=B0=8F=20response=20=EC=A3=BC?= =?UTF-8?q?=EC=84=9D=EC=97=90=EB=94=B0=EB=A5=B8=20blog=EC=BD=94=EB=93=9C?= =?UTF-8?q?=20=EB=A6=AC=ED=8E=99=ED=86=A0=EB=A7=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../pre-processing-service/app/api/endpoints/blog.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index bdb70557..138fb706 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -44,9 +44,7 @@ async def publish(request: RequestBlogPublish): raise CustomException( "네이버 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish( - job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result - ) + return ResponseBlogPublish(status="success", metadata=result) elif request.tag == "tistory": tistory_service = TistoryBlogPostService() @@ -61,9 +59,7 @@ async def publish(request: RequestBlogPublish): "티스토리 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish( - job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result - 
) + return ResponseBlogPublish(status="success", metadata=result) elif request.tag == "blogger": blogger_service = BloggerBlogPostService() @@ -78,6 +74,4 @@ async def publish(request: RequestBlogPublish): "블로거 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish( - job_id=1, schedule_id=1, schedule_his_id=1, status="200", metadata=result - ) + return ResponseBlogPublish(status="success", metadata=result) From 959adf4305a201bf351141ecd82bad1b9f7d71f5 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Mon, 15 Sep 2025 11:35:23 +0900 Subject: [PATCH 14/57] Workflow dummy data insert sql (alpha) (#93) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: workflow dummy data (experimental) * chore: Super admin에 workflow 할당 --- .../main/resources/application-develop.yml | 1 + .../src/main/resources/sql/01-schema.sql | 22 +++- .../main/resources/sql/03-insert-workflow.sql | 120 ++++++++++++++++++ 3 files changed, 142 insertions(+), 1 deletion(-) create mode 100644 apps/user-service/src/main/resources/sql/03-insert-workflow.sql diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index e7bc3f09..b8cb1648 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -33,6 +33,7 @@ spring: - classpath:sql/00-truncate.sql - classpath:sql/01-insert-internal-users.sql - classpath:sql/02-insert-external-users.sql + - classpath:sql/03-insert-workflow.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql index 569c452a..f09418b0 100644 --- a/apps/user-service/src/main/resources/sql/01-schema.sql +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -284,4 +284,24 @@ CREATE TABLE `task_run` ( INDEX `idx_task_run_job_run_id` (`job_run_id`), INDEX 
`idx_task_run_status` (`status`), INDEX `idx_task_run_task_id` (`task_id`) - ); \ No newline at end of file + ); + +-- v0.0.3 +DROP TABLE IF EXISTS `config`; + +ALTER TABLE `workflow_job` + ADD COLUMN `execution_order` INT NULL AFTER `job_id`; + + +ALTER TABLE `schedule` + ADD COLUMN `schedule_text` varchar(20) NULL; + +ALTER TABLE `workflow` + ADD COLUMN `default_config`json NULL; + + +ALTER TABLE `user` + ADD COLUMN `joined_at` timestamp NULL; + +ALTER TABLE `department` + ADD COLUMN `description` varchar(100) NULL; \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql new file mode 100644 index 00000000..dd2ddb15 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql @@ -0,0 +1,120 @@ +-- 워크플로우 관련 데이터 삽입 + +-- 카테고리 삽입 +INSERT INTO `category` (`name`, `description`) VALUES + ('마케팅', '마케팅 관련 자동화 워크플로우'), + ('콘텐츠', '콘텐츠 생성 및 관리'), + ('데이터 수집', '웹 크롤링 및 데이터 수집 관련'); + +-- 워크플로우 생성 +INSERT INTO `workflow` (`name`, `description`, `is_enabled`, `created_by`) VALUES + ('트렌드_블로그_자동화', '트렌드 검색부터 블로그 글 작성까지 전체 자동화 프로세스', TRUE, 1); + +-- Job 생성 +INSERT INTO `job` (`name`, `description`, `is_enabled`, `created_by`) VALUES + ('트렌드_검색_작업', '최신 트렌드 키워드 검색 및 분석', TRUE, 1), + ('싸다구_크롤링_작업', '싸다구 사이트에서 관련 상품 정보 크롤링', TRUE, 1), + ('블로그_글_작성_작업', '수집된 데이터를 바탕으로 블로그 글 자동 생성', TRUE, 1); + +-- Task 생성 +INSERT INTO `task` (`name`, `type`, `parameters`) VALUES +-- 트렌드 검색 관련 태스크 +('구글_트렌드_검색', 'API_CALL', JSON_OBJECT( + 'api_endpoint', 'https://trends.googleapis.com/trends/api', + 'search_region', 'KR', + 'timeframe', 'now 7-d', + 'category', '0' + )), +('네이버_트렌드_검색', 'API_CALL', JSON_OBJECT( + 'api_endpoint', 'https://datalab.naver.com/keyword/trendSearch.naver', + 'period', 'week', + 'device', 'pc' + )), +('키워드_분석_및_필터링', 'DATA_PROCESSING', JSON_OBJECT( + 'min_score', 50, + 'max_keywords', 10, + 'filter_rules', JSON_ARRAY('adult_content', 
'spam_keywords') + )), + +-- 싸다구 크롤링 관련 태스크 +('싸다구_상품_검색', 'WEB_SCRAPING', JSON_OBJECT( + 'base_url', 'https://www.ssg.com', + 'search_path', '/search.ssg', + 'max_pages', 3, + 'delay_ms', 2000 + )), +('상품_정보_추출', 'DATA_EXTRACTION', JSON_OBJECT( + 'extract_fields', JSON_ARRAY('title', 'price', 'rating', 'review_count', 'image_url'), + 'data_validation', true + )), +('가격_비교_분석', 'DATA_ANALYSIS', JSON_OBJECT( + 'comparison_sites', JSON_ARRAY('쿠팡', '11번가', '옥션'), + 'price_threshold', 0.1 + )), + +-- 블로그 글 작성 관련 태스크 +('블로그_템플릿_선택', 'TEMPLATE_PROCESSING', JSON_OBJECT( + 'template_type', 'product_review', + 'style', 'conversational', + 'target_length', 1500 + )), +('AI_콘텐츠_생성', 'AI_GENERATION', JSON_OBJECT( + 'model', 'gpt-4', + 'temperature', 0.7, + 'max_tokens', 2000, + 'prompt_template', '트렌드 키워드와 상품 정보를 바탕으로 자연스러운 블로그 글을 작성해주세요.' + )), +('콘텐츠_검수_및_최적화', 'CONTENT_REVIEW', JSON_OBJECT( + 'seo_optimization', true, + 'readability_check', true, + 'plagiarism_check', true + )), +('블로그_플랫폼_발행', 'PUBLISHING', JSON_OBJECT( + 'platforms', JSON_ARRAY('네이버 블로그', '티스토리', '브런치'), + 'schedule_publish', false, + 'auto_tags', true + )); + +-- 워크플로우-Job 연결 +INSERT INTO `workflow_job` (`workflow_id`, `job_id`) VALUES + (1, 1), -- 트렌드_블로그_자동화 + 트렌드_검색_작업 + (1, 2), -- 트렌드_블로그_자동화 + 싸다구_크롤링_작업 + (1, 3); -- 트렌드_블로그_자동화 + 블로그_글_작성_작업 + +-- Job-Task 연결 (실행 순서 포함) +-- 트렌드 검색 작업의 태스크들 +INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES + (1, 1, 1), -- 구글_트렌드_검색 + (1, 2, 2), -- 네이버_트렌드_검색 + (1, 3, 3); -- 키워드_분석_및_필터링 + +-- 싸다구 크롤링 작업의 태스크들 +INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES + (2, 4, 1), -- 싸다구_상품_검색 + (2, 5, 2), -- 상품_정보_추출 + (2, 6, 3); -- 가격_비교_분석 + +-- 블로그 글 작성 작업의 태스크들 +INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES + (3, 7, 1), -- 블로그_템플릿_선택 + (3, 8, 2), -- AI_콘텐츠_생성 + (3, 9, 3), -- 콘텐츠_검수_및_최적화 + (3, 10, 4); -- 블로그_플랫폼_발행 + +-- 스케줄 설정 (매일 오전 8시 실행) +INSERT INTO `schedule` (`workflow_id`, `cron_expression`, 
`parameters`, `is_active`, `created_by`) VALUES + (1, '0 0 8 * * *', JSON_OBJECT( + 'timezone', 'Asia/Seoul', + 'retry_count', 3, + 'timeout_minutes', 60, + 'notification_email', 'admin@icebang.site' + ), TRUE, 1); + +-- 사용자별 설정 (관리자용) +INSERT INTO `user_config` (`user_id`, `type`, `name`, `json`, `is_active`) VALUES + (1, 'workflow_preference', '트렌드_블로그_설정', JSON_OBJECT( + 'preferred_keywords', JSON_ARRAY('테크', 'IT', '트렌드', '리뷰'), + 'blog_style', 'casual', + 'auto_publish', false, + 'notification_enabled', true + ), TRUE); \ No newline at end of file From 7a147bd7206313d96079b56c64e4f5663fa778fe Mon Sep 17 00:00:00 2001 From: can019 Date: Mon, 15 Sep 2025 12:45:58 +0900 Subject: [PATCH 15/57] =?UTF-8?q?chore:=20v0.0.4=20erd=20=EC=A0=81?= =?UTF-8?q?=EC=9A=A9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/main/resources/sql/01-schema.sql | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql index f09418b0..2f1d4655 100644 --- a/apps/user-service/src/main/resources/sql/01-schema.sql +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -304,4 +304,41 @@ ALTER TABLE `user` ADD COLUMN `joined_at` timestamp NULL; ALTER TABLE `department` - ADD COLUMN `description` varchar(100) NULL; \ No newline at end of file + ADD COLUMN `description` varchar(100) NULL; + +-- v0.4 +-- 기존 execution_log 테이블 수정 +ALTER TABLE `execution_log` +-- 새로운 컬럼 추가 + ADD COLUMN `run_id` bigint unsigned NULL COMMENT 'workflow_run_id, job_run_id, task_run_id' AFTER `source_id`, +ADD COLUMN `status` varchar(20) NULL COMMENT 'success, failed, warning, running' AFTER `log_level`, +ADD COLUMN `duration_ms` int unsigned NULL COMMENT '실행 시간 (밀리초)' AFTER `executed_at`, +ADD COLUMN `error_code` varchar(50) NULL COMMENT '에러 코드' AFTER `duration_ms`, + +-- 예비 컬럼 (향후 확장용) +ADD COLUMN `reserved1` 
varchar(100) NULL COMMENT '예비 컬럼 1', +ADD COLUMN `reserved2` varchar(100) NULL COMMENT '예비 컬럼 2', +ADD COLUMN `reserved3` int NULL COMMENT '예비 컬럼 3', +ADD COLUMN `reserved4` json NULL COMMENT '예비 컬럼 4', +ADD COLUMN `reserved5` timestamp NULL COMMENT '예비 컬럼 5'; + +-- 기존 컬럼 수정 +ALTER TABLE `execution_log` + MODIFY COLUMN `log_message` varchar(500) NOT NULL COMMENT '요약 메시지', + MODIFY COLUMN `executed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '실행 시간'; + +-- 기존 불필요한 컬럼 제거 (있다면) +ALTER TABLE `execution_log` +DROP COLUMN IF EXISTS `config_snapshot`; + +-- 새로운 인덱스 추가 +ALTER TABLE `execution_log` + ADD INDEX `idx_run_id` (`run_id`), +ADD INDEX `idx_log_level_status` (`log_level`, `status`), +ADD INDEX `idx_error_code` (`error_code`), +ADD INDEX `idx_duration` (`duration_ms`); + +-- 기존 인덱스 수정 (복합 인덱스 최적화) +ALTER TABLE `execution_log` +DROP INDEX IF EXISTS `idx_source_id_type`, +ADD INDEX `idx_execution_type_source` (`execution_type`, `source_id`); \ No newline at end of file From 1050dd358230bee40b77274e589f5bc042d30445 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Mon, 15 Sep 2025 15:38:28 +0900 Subject: [PATCH 16/57] =?UTF-8?q?Loki,=20Grafana=20=EB=A1=9C=EC=BB=AC=20?= =?UTF-8?q?=EC=84=B8=ED=8C=85=20=EB=B0=8F=20log=20=EC=B2=98=EB=A6=AC=20(#9?= =?UTF-8?q?5)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Docker compose loki, grafana * chore: Sync appender 기반 grafana 연결 * chore: fix test --- apps/user-service/build.gradle | 5 + .../icebang/global/filter/LoggingFilter.java | 3 +- .../application-test-integration.yml | 2 +- .../src/main/resources/log4j2-develop.yml | 133 ++++-------------- .../src/main/resources/sql/01-schema.sql | 59 ++++---- docker/local/docker-compose.yml | 37 ++++- 6 files changed, 94 insertions(+), 145 deletions(-) diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 624067f6..d2ffcb1e 100644 --- a/apps/user-service/build.gradle +++ 
b/apps/user-service/build.gradle @@ -54,6 +54,11 @@ dependencies { implementation 'org.apache.logging.log4j:log4j-slf4j2-impl:2.22.1' implementation 'org.apache.logging.log4j:log4j-jul:2.22.1' implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' + implementation 'org.apache.logging.log4j:log4j-layout-template-json:2.22.1' + + implementation 'org.apache.httpcomponents:httpclient:4.5.14' + implementation 'org.apache.httpcomponents:httpcore:4.4.16' + implementation 'pl.tkowalcz.tjahzi:log4j2-appender-nodep:0.9.17' // 비동기 로깅 implementation 'com.lmax:disruptor:3.4.4' diff --git a/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java index e89f2d80..0a782839 100644 --- a/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java +++ b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java @@ -30,7 +30,8 @@ protected void doFilterInternal( traceId = UUID.randomUUID().toString(); } - MDC.put("traceId", traceId.substring(0, 8)); + // MDC.put("traceId", traceId.substring(0, 8)); + MDC.put("traceId", traceId); // ⭐️ 요청 객체에 attribute로 traceId를 저장하여 컨트롤러 등에서 사용할 수 있게 함 request.setAttribute("X-Request-ID", traceId); diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml index 0ed34f36..526cf151 100644 --- a/apps/user-service/src/main/resources/application-test-integration.yml +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -39,4 +39,4 @@ mybatis: map-underscore-to-camel-case: true logging: - config: classpath:log4j2-develop.yml \ No newline at end of file + config: classpath:log4j2-test-unit.yml \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index f900c3b1..5303e6ff 100644 --- 
a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -1,133 +1,50 @@ Configuration: + status: DEBUG name: develop properties: property: - - name: "log-path" - value: "./logs" - - name: "charset-UTF-8" - value: "UTF-8" - # 통일된 콘솔 패턴 - 모든 로그에 RequestId 포함 - - name: "console-layout-pattern" - value: "%highlight{[%-5level]} [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %n %msg%n%n" - # 파일용 상세 패턴 - RequestId 포함 - - name: "file-layout-pattern" - value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n" - # 로그 파일 경로들 - - name: "info-log" - value: ${log-path}/user-service/info.log - - name: "error-log" - value: ${log-path}/user-service/error.log - - name: "auth-log" - value: ${log-path}/user-service/auth.log - - name: "json-log" - value: ${log-path}/user-service/json-info.log + - name: "app-name" + value: "${env:APP_NAME:-user-service-app}" - # [Appenders] 로그 기록방식 정의 Appenders: - # 통일된 콘솔 출력 + # 콘솔 appender Console: name: console-appender target: SYSTEM_OUT PatternLayout: - pattern: ${console-layout-pattern} + pattern: "[%-5level] [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %msg%n" - # 롤링 파일 로그 - RollingFile: - name: rolling-file-appender - fileName: ${log-path}/rolling-file.log - filePattern: "logs/archive/rolling-file.log.%d{yyyy-MM-dd-hh-mm}_%i.gz" + # Tjahzi Loki Appender (올바른 문법) + Loki: + name: loki-appender + host: localhost + port: 3100 PatternLayout: - charset: ${charset-UTF-8} - pattern: ${file-layout-pattern} - Policies: - SizeBasedTriggeringPolicy: - size: "200KB" - TimeBasedTriggeringPolicy: - interval: "1" - DefaultRollOverStrategy: - max: "30" - fileIndex: "max" + pattern: "[%-5level] [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %msg%n" + Label: + - name: "app" + value: "${app-name}" # 고정 값 + - name: "service" + value: "user-service" # 고정 값 + - name: "traceId" + value: "${ctx:traceId}" # MDC에서 가져올 값만 넣음 - # 파일 로그들 - File: - - name: file-info-appender - fileName: ${info-log} - 
PatternLayout: - pattern: ${file-layout-pattern} - - name: file-error-appender - fileName: ${error-log} - PatternLayout: - pattern: ${file-layout-pattern} - - name: file-auth-appender - fileName: ${auth-log} - PatternLayout: - pattern: ${file-layout-pattern} - - name: file-json-info-appender - fileName: ${json-log} - PatternLayout: - pattern: ${file-layout-pattern} - - # [Loggers] 로그 출력 범위를 정의 Loggers: - # [Loggers - Root] 모든 로그를 기록하는 최상위 로그를 정의 Root: - level: OFF + level: INFO AppenderRef: - ref: console-appender - - ref: rolling-file-appender - # [Loggers - Loggers] 특정 패키지나 클래스에 대한 로그를 정의 Logger: - # 1. Spring Framework 로그 - - name: org.springframework - additivity: "false" - level: DEBUG - AppenderRef: - - ref: console-appender - - ref: file-info-appender - - ref: file-error-appender - - # 2. 애플리케이션 로그 + # 애플리케이션 로그만 Loki로 전송 (additivity 문법 수정) - name: site.icebang - additivity: "false" - level: TRACE + additivity: false + level: INFO AppenderRef: - ref: console-appender - - ref: file-info-appender - - ref: file-error-appender + - ref: loki-appender - # 3. HikariCP 로그 비활성화 + # HikariCP 로그 비활성화 - name: com.zaxxer.hikari - level: OFF - - # 4. Spring Security 로그 - 인증/인가 추적에 중요 - - name: org.springframework.security - level: DEBUG - additivity: "false" - AppenderRef: - - ref: console-appender - - ref: file-auth-appender - - # 5. 웹 요청 로그 - 요청 처리 과정 추적 - - name: org.springframework.web - level: DEBUG - additivity: "false" - AppenderRef: - - ref: console-appender - - ref: file-info-appender - - # 6. 
트랜잭션 로그 - DB 작업 추적 - - name: org.springframework.transaction - level: DEBUG - additivity: "false" - AppenderRef: - - ref: console-appender - - ref: file-info-appender - - - name: site.icebang.domain.auth.mapper - level: DEBUG - additivity: "false" - AppenderRef: - - ref: console-appender - - ref: file-info-appender \ No newline at end of file + level: OFF \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql index 2f1d4655..31242c33 100644 --- a/apps/user-service/src/main/resources/sql/01-schema.sql +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -308,37 +308,28 @@ ALTER TABLE `department` -- v0.4 -- 기존 execution_log 테이블 수정 -ALTER TABLE `execution_log` --- 새로운 컬럼 추가 - ADD COLUMN `run_id` bigint unsigned NULL COMMENT 'workflow_run_id, job_run_id, task_run_id' AFTER `source_id`, -ADD COLUMN `status` varchar(20) NULL COMMENT 'success, failed, warning, running' AFTER `log_level`, -ADD COLUMN `duration_ms` int unsigned NULL COMMENT '실행 시간 (밀리초)' AFTER `executed_at`, -ADD COLUMN `error_code` varchar(50) NULL COMMENT '에러 코드' AFTER `duration_ms`, - --- 예비 컬럼 (향후 확장용) -ADD COLUMN `reserved1` varchar(100) NULL COMMENT '예비 컬럼 1', -ADD COLUMN `reserved2` varchar(100) NULL COMMENT '예비 컬럼 2', -ADD COLUMN `reserved3` int NULL COMMENT '예비 컬럼 3', -ADD COLUMN `reserved4` json NULL COMMENT '예비 컬럼 4', -ADD COLUMN `reserved5` timestamp NULL COMMENT '예비 컬럼 5'; - --- 기존 컬럼 수정 -ALTER TABLE `execution_log` - MODIFY COLUMN `log_message` varchar(500) NOT NULL COMMENT '요약 메시지', - MODIFY COLUMN `executed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '실행 시간'; - --- 기존 불필요한 컬럼 제거 (있다면) -ALTER TABLE `execution_log` -DROP COLUMN IF EXISTS `config_snapshot`; - --- 새로운 인덱스 추가 -ALTER TABLE `execution_log` - ADD INDEX `idx_run_id` (`run_id`), -ADD INDEX `idx_log_level_status` (`log_level`, `status`), -ADD INDEX `idx_error_code` (`error_code`), -ADD INDEX `idx_duration` 
(`duration_ms`); - --- 기존 인덱스 수정 (복합 인덱스 최적화) -ALTER TABLE `execution_log` -DROP INDEX IF EXISTS `idx_source_id_type`, -ADD INDEX `idx_execution_type_source` (`execution_type`, `source_id`); \ No newline at end of file +-- 컬럼 추가 (한 번에 하나씩) +-- 컬럼 추가 +ALTER TABLE execution_log ADD COLUMN run_id BIGINT NULL; +ALTER TABLE execution_log ADD COLUMN status VARCHAR(20) NULL; +ALTER TABLE execution_log ADD COLUMN duration_ms INT NULL; +ALTER TABLE execution_log ADD COLUMN error_code VARCHAR(50) NULL; +ALTER TABLE execution_log ADD COLUMN reserved1 VARCHAR(100) NULL; +ALTER TABLE execution_log ADD COLUMN reserved2 VARCHAR(100) NULL; +ALTER TABLE execution_log ADD COLUMN reserved3 INT NULL; +ALTER TABLE execution_log ADD COLUMN reserved4 json NULL; +ALTER TABLE execution_log ADD COLUMN reserved5 TIMESTAMP NULL; + +-- 컬럼 수정 +ALTER TABLE execution_log MODIFY COLUMN log_message VARCHAR(500) NOT NULL; +ALTER TABLE execution_log MODIFY COLUMN executed_at TIMESTAMP NOT NULL; + +-- 컬럼 삭제 +ALTER TABLE execution_log DROP COLUMN config_snapshot; + +-- 인덱스 생성 (CREATE INDEX 별도) +CREATE INDEX idx_run_id ON execution_log(run_id); +CREATE INDEX idx_log_level_status ON execution_log(log_level, status); +CREATE INDEX idx_error_code ON execution_log(error_code); +CREATE INDEX idx_duration ON execution_log(duration_ms); +CREATE INDEX idx_execution_type_source ON execution_log(execution_type, source_id); diff --git a/docker/local/docker-compose.yml b/docker/local/docker-compose.yml index c0bf14fd..c69ea697 100644 --- a/docker/local/docker-compose.yml +++ b/docker/local/docker-compose.yml @@ -33,5 +33,40 @@ services: depends_on: - mariadb + loki: + image: grafana/loki:2.9.0 + container_name: loki + restart: unless-stopped + ports: + - "3100:3100" + command: -config.file=/etc/loki/local-config.yaml + volumes: + - loki_data:/loki + healthcheck: + test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:3100/ready || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + + grafana: 
+ image: grafana/grafana:10.1.0 + container_name: grafana + restart: unless-stopped + environment: + - GF_SECURITY_ADMIN_PASSWORD=admin + ports: + - "3030:3000" + volumes: + - grafana_data:/var/lib/grafana + depends_on: + - loki + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:3000/api/health || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + volumes: - mariadb_data: \ No newline at end of file + mariadb_data: + loki_data: + grafana_data: \ No newline at end of file From 91db5c3b0505b832ce1cfd0b917ef955f162e1e5 Mon Sep 17 00:00:00 2001 From: bwnfo3 <142577603+bwnfo3@users.noreply.github.com> Date: Mon, 15 Sep 2025 17:15:43 +0900 Subject: [PATCH 17/57] =?UTF-8?q?=EB=A1=9C=EA=B7=B8=EC=95=84=EC=9B=83=20ap?= =?UTF-8?q?i=20(#96)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: UserLogoutFlowE2eTest 초안 * feat: UserLogoutFlowE2eTest * feat: AuthController에 logout api 추가 * feat: AuthApiIntegrationTest에 Logout 추가 * feat: h2 호환 스키마 파일 --- .../auth/controller/AuthController.java | 14 + .../src/main/resources/sql/01-schema-h2.sql | 328 ++++++++++++++++++ .../e2e/scenario/UserLogoutFlowE2eTest.java | 159 +++++++++ .../tests/auth/AuthApiIntegrationTest.java | 59 ++++ 4 files changed, 560 insertions(+) create mode 100644 apps/user-service/src/main/resources/sql/01-schema-h2.sql create mode 100644 apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java diff --git a/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java index d0a98142..2303cf74 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/auth/controller/AuthController.java @@ -61,4 +61,18 @@ public ApiResponse checkSession(@AuthenticationPrincipal AuthCredential public ApiResponse 
getPermissions(@AuthenticationPrincipal AuthCredential user) { return ApiResponse.success(user); } + + @PostMapping("/logout") + public ApiResponse logout(HttpServletRequest request) { + // SecurityContext 정리 + SecurityContextHolder.clearContext(); + + // 세션 무효화 + HttpSession session = request.getSession(false); + if (session != null) { + session.invalidate(); + } + + return ApiResponse.success(null); + } } diff --git a/apps/user-service/src/main/resources/sql/01-schema-h2.sql b/apps/user-service/src/main/resources/sql/01-schema-h2.sql new file mode 100644 index 00000000..018ebb1d --- /dev/null +++ b/apps/user-service/src/main/resources/sql/01-schema-h2.sql @@ -0,0 +1,328 @@ +-- H2 데이터베이스 호환 스키마 (테스트용) +-- MySQL의 unsigned, AFTER 절 등을 H2 호환으로 변경 + +CREATE TABLE `permission` ( + `id` int NOT NULL AUTO_INCREMENT, + `resource` varchar(100) NULL, + `description` varchar(255) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `is_active` boolean DEFAULT TRUE, + `updated_by` bigint NULL, + `created_by` bigint NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `organization` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `name` varchar(150) NULL, + `domain_name` varchar(100) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +); + +CREATE TABLE `role` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `organization_id` bigint NULL, + `name` varchar(100) NULL, + `description` varchar(500) NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `user` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `name` varchar(50) NULL, + `email` varchar(100) NULL, + `password` varchar(255) NULL, + `status` varchar(20) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +); + +CREATE TABLE `department` ( + `id` bigint 
NOT NULL AUTO_INCREMENT, + `organization_id` bigint NOT NULL, + `name` varchar(100) NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `position` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `organization_id` bigint NOT NULL, + `title` varchar(100) NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `user_organization` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `user_id` bigint NOT NULL, + `organization_id` bigint NOT NULL, + `position_id` bigint NOT NULL, + `department_id` bigint NOT NULL, + `employee_number` varchar(50) NULL, + `status` varchar(20) NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +); + +CREATE TABLE `role_permission` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `role_id` bigint NOT NULL, + `permission_id` int NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_role_permission` (`role_id`, `permission_id`) +); + +CREATE TABLE `user_role` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `role_id` bigint NOT NULL, + `user_organization_id` bigint NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_user_role` (`role_id`, `user_organization_id`) +); + +-- 성능 최적화를 위한 인덱스 +CREATE INDEX `idx_user_email` ON `user` (`email`); +CREATE INDEX `idx_user_status` ON `user` (`status`); +CREATE INDEX `idx_user_organization_user` ON `user_organization` (`user_id`); +CREATE INDEX `idx_user_organization_org` ON `user_organization` (`organization_id`); +CREATE INDEX `idx_user_organization_status` ON `user_organization` (`status`); +CREATE INDEX `idx_role_org` ON `role` (`organization_id`); +CREATE INDEX `idx_permission_resource` ON `permission` (`resource`); +CREATE INDEX `idx_permission_active` ON `permission` (`is_active`); + +CREATE TABLE `workflow` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `description` text NULL, + `is_enabled` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint NULL, + 
`updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `schedule` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `workflow_id` bigint NOT NULL, + `cron_expression` varchar(50) NULL, + `parameters` json NULL, + `is_active` boolean DEFAULT TRUE, + `last_run_status` varchar(20) NULL, + `last_run_at` timestamp NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `job` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `description` text NULL, + `is_enabled` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint NULL, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `updated_by` bigint NULL, + PRIMARY KEY (`id`) +); + +CREATE TABLE `task` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL UNIQUE, + `type` varchar(50) NULL, + `parameters` json NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +); + +CREATE TABLE `workflow_job` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `workflow_id` bigint NOT NULL, + `job_id` bigint NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_workflow_job` (`workflow_id`, `job_id`) +); + +CREATE TABLE `job_task` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `job_id` bigint NOT NULL, + `task_id` bigint NOT NULL, + `execution_order` int NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_job_task` (`job_id`, `task_id`) +); + +CREATE TABLE `execution_log` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `execution_type` varchar(20) NULL COMMENT 'task, schedule, job, workflow', + `source_id` bigint NULL COMMENT '모든 데이터에 대한 ID ex: job_id, schedule_id, task_id, ...', + 
`log_level` varchar(20) NULL, + `executed_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `log_message` text NULL, + `trace_id` char(36) NULL, + `config_snapshot` json NULL, + PRIMARY KEY (`id`), + INDEX `idx_source_id_type` (`source_id`, `execution_type`) +); + +CREATE TABLE `task_io_data` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `task_run_id` bigint NOT NULL, + `io_type` varchar(10) NOT NULL COMMENT 'INPUT, OUTPUT', + `name` varchar(100) NOT NULL COMMENT '파라미터/변수 이름', + `data_type` varchar(50) NOT NULL COMMENT 'string, number, json, file, etc', + `data_value` json NULL COMMENT '실제 데이터 값', + `data_size` bigint NULL COMMENT '데이터 크기 (bytes)', + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_task_io_task_run_id` (`task_run_id`), + INDEX `idx_task_io_type` (`io_type`), + INDEX `idx_task_io_name` (`name`) +); + +CREATE TABLE `config` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `target_type` varchar(50) NULL COMMENT 'user, job, workflow', + `target_id` bigint NULL, + `version` int NULL, + `json` json NULL, + `is_active` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `created_by` bigint NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_config_target` (`target_type`, `target_id`) +); + +CREATE TABLE `category` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `name` varchar(100) NULL, + `description` text NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +); + +CREATE TABLE `user_config` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `user_id` bigint NOT NULL, + `type` varchar(50) NULL, + `name` varchar(100) NULL, + `json` json NULL, + `is_active` boolean DEFAULT TRUE, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +); + +-- 인덱스 추가 (성능 최적화) +CREATE INDEX `idx_schedule_workflow` ON `schedule` (`workflow_id`); 
+CREATE INDEX `idx_job_enabled` ON `job` (`is_enabled`); +CREATE INDEX `idx_task_type` ON `task` (`type`); +CREATE INDEX `idx_workflow_enabled` ON `workflow` (`is_enabled`); +CREATE UNIQUE INDEX `uk_schedule_workflow` ON `schedule` (`workflow_id`); +CREATE UNIQUE INDEX `uk_job_name` ON `job` (`name`); +CREATE UNIQUE INDEX `uk_task_name` ON `task` (`name`); +CREATE UNIQUE INDEX `uk_workflow_name` ON `workflow` (`name`); +CREATE INDEX `idx_user_config_user` ON `user_config` (`user_id`); + +-- 워크플로우 실행 테이블 +CREATE TABLE `workflow_run` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `workflow_id` bigint NOT NULL, + `trace_id` char(36) NOT NULL, + `run_number` varchar(20) NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled', + `trigger_type` varchar(20) NULL COMMENT 'manual, schedule, push, pull_request', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `created_by` bigint NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uk_workflow_run_trace` (`trace_id`), + INDEX `idx_workflow_run_status` (`status`), + INDEX `idx_workflow_run_workflow_id` (`workflow_id`), + INDEX `idx_workflow_run_created_at` (`created_at`) +); + +-- Job 실행 테이블 +CREATE TABLE `job_run` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `workflow_run_id` bigint NOT NULL, + `job_id` bigint NOT NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, skipped', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `execution_order` int NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_job_run_workflow_run_id` (`workflow_run_id`), + INDEX `idx_job_run_status` (`status`), + INDEX `idx_job_run_job_id` (`job_id`) +); + +-- Task 실행 테이블 +CREATE TABLE `task_run` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `job_run_id` bigint NOT NULL, + `task_id` bigint NOT NULL, + `status` varchar(20) NULL COMMENT 'pending, running, success, failed, cancelled, 
skipped', + `started_at` timestamp NULL, + `finished_at` timestamp NULL, + `execution_order` int NULL, + `created_at` timestamp DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + INDEX `idx_task_run_job_run_id` (`job_run_id`), + INDEX `idx_task_run_status` (`status`), + INDEX `idx_task_run_task_id` (`task_id`) +); + +-- v0.0.3 - H2 호환 버전 +DROP TABLE IF EXISTS `config`; + +-- H2에서는 한 번에 하나씩 컬럼 추가 +ALTER TABLE `workflow_job` ADD COLUMN `execution_order` INT NULL; + +ALTER TABLE `schedule` ADD COLUMN `schedule_text` varchar(20) NULL; + +ALTER TABLE `workflow` ADD COLUMN `default_config` json NULL; + +ALTER TABLE `user` ADD COLUMN `joined_at` timestamp NULL; + +ALTER TABLE `department` ADD COLUMN `description` varchar(100) NULL; + +-- v0.4 - H2 호환 버전 (AFTER 절 제거, unsigned 제거, 개별 ALTER 구문으로 분리) +-- execution_log 테이블 컬럼 추가 (H2 호환) +ALTER TABLE `execution_log` ADD COLUMN `run_id` bigint NULL; +ALTER TABLE `execution_log` ADD COLUMN `status` varchar(20) NULL; +ALTER TABLE `execution_log` ADD COLUMN `duration_ms` int NULL; +ALTER TABLE `execution_log` ADD COLUMN `error_code` varchar(50) NULL; +ALTER TABLE `execution_log` ADD COLUMN `reserved1` varchar(100) NULL; +ALTER TABLE `execution_log` ADD COLUMN `reserved2` varchar(100) NULL; +ALTER TABLE `execution_log` ADD COLUMN `reserved3` int NULL; +ALTER TABLE `execution_log` ADD COLUMN `reserved4` json NULL; +ALTER TABLE `execution_log` ADD COLUMN `reserved5` timestamp NULL; + +-- 기존 컬럼 수정 (H2 호환) +ALTER TABLE `execution_log` ALTER COLUMN `log_message` varchar(500) NOT NULL; +ALTER TABLE `execution_log` ALTER COLUMN `executed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP; + +-- 기존 불필요한 컬럼 제거 +ALTER TABLE `execution_log` DROP COLUMN IF EXISTS `config_snapshot`; + +-- 새로운 인덱스 추가 +CREATE INDEX `idx_run_id` ON `execution_log` (`run_id`); +CREATE INDEX `idx_log_level_status` ON `execution_log` (`log_level`, `status`); +CREATE INDEX `idx_error_code` ON `execution_log` (`error_code`); +CREATE INDEX `idx_duration` ON `execution_log` 
(`duration_ms`); + +-- 기존 인덱스 수정 +DROP INDEX IF EXISTS `idx_source_id_type`; +CREATE INDEX `idx_execution_type_source` ON `execution_log` (`execution_type`, `source_id`); diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java new file mode 100644 index 00000000..8fea2764 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java @@ -0,0 +1,159 @@ +package site.icebang.e2e.scenario; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.test.context.jdbc.Sql; + +import site.icebang.e2e.setup.annotation.E2eTest; +import site.icebang.e2e.setup.support.E2eTestSupport; + +@Sql( + value = "classpath:sql/01-insert-internal-users.sql", + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) +@DisplayName("사용자 로그아웃 플로우 E2E 테스트") +@E2eTest +class UserLogoutFlowE2eTest extends E2eTestSupport { + + @SuppressWarnings("unchecked") + @Test + @DisplayName("정상 로그아웃 전체 플로우 - TDD REd 단계") + void completeUserRegistrationFlow_shouldFailBecauseApiNotImplemented() throws Exception { + logStep(1, "관리자 로그인 (최우선)"); + + // 1. 
관리자 로그인으로 인증 상태 확립 + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders loginHeaders = new HttpHeaders(); + loginHeaders.setContentType(MediaType.APPLICATION_JSON); + loginHeaders.set("Origin", "https://admin.icebang.site"); + loginHeaders.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> loginEntity = new HttpEntity<>(loginRequest, loginHeaders); + + ResponseEntity loginResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), loginEntity, Map.class); + + assertThat(loginResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) loginResponse.getBody().get("success")).isTrue(); + + logSuccess("관리자 로그인 성공 - 인증 상태 확립 완료"); + + logStep(2, "로그인 상태에서 보호된 리소스 접근 확인"); + + // 로그인 응답에서 세션 쿠키 추출 + String sessionCookie = null; + java.util.List cookies = loginResponse.getHeaders().get("Set-Cookie"); + if (cookies != null) { + for (String cookie : cookies) { + if (cookie.startsWith("JSESSIONID")) { + sessionCookie = cookie.split(";")[0]; // JSESSIONID=XXX 부분만 추출 + break; + } + } + } + + // 2. 로그인된 상태에서 본인 프로필 조회로 인증 상태 확인 + // /v0/users/me는 인증된 사용자만 접근 가능한 일반적인 API + HttpHeaders authenticatedHeaders = new HttpHeaders(); + if (sessionCookie != null) { + authenticatedHeaders.set("Cookie", sessionCookie); + } + + HttpEntity authenticatedEntity = new HttpEntity<>(authenticatedHeaders); + ResponseEntity beforeLogoutResponse = + restTemplate.exchange( + getV0ApiUrl("/users/me"), HttpMethod.GET, authenticatedEntity, Map.class); + + assertThat(beforeLogoutResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) beforeLogoutResponse.getBody().get("success")).isTrue(); + assertThat(beforeLogoutResponse.getBody().get("data")).isNotNull(); + + logSuccess("인증된 상태에서 본인 프로필 조회 성공"); + + // 3. 
로그아웃 API 호출 + HttpHeaders logoutHeaders = new HttpHeaders(); + logoutHeaders.setContentType(MediaType.APPLICATION_JSON); + logoutHeaders.set("Origin", "https://admin.icebang.site"); + logoutHeaders.set("Referer", "https://admin.icebang.site/"); + + // 로그아웃 요청에도 세션 쿠키 포함 + if (sessionCookie != null) { + logoutHeaders.set("Cookie", sessionCookie); + } + + HttpEntity> logoutEntity = new HttpEntity<>(new HashMap<>(), logoutHeaders); + + try { + ResponseEntity logoutResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/logout"), logoutEntity, Map.class); + logStep(4, "로그아웃 응답 검증 (API구현 돼있으면)"); + + logSuccess("로그아웃 API 호출 성공"); + + logStep(5, "로그아웃 후 인증 무효화 확인"); + + // 5. 로그아웃 후 동일한 프로필 API 접근 시 인증 실패 확인 + HttpEntity afterLogoutEntity = new HttpEntity<>(authenticatedHeaders); + ResponseEntity afterLogoutResponse = + restTemplate.exchange( + getV0ApiUrl("/users/me"), HttpMethod.GET, afterLogoutEntity, Map.class); + + // 핵심 검증: 로그아웃 후에는 인증 실패로 401 또는 403 응답이어야 함 + assertThat(afterLogoutResponse.getStatusCode()) + .withFailMessage( + "로그아웃 후 프로필 접근이 차단되어야 합니다. 현재 상태코드: %s", afterLogoutResponse.getStatusCode()) + .isIn(HttpStatus.UNAUTHORIZED, HttpStatus.FORBIDDEN); + logSuccess("로그아웃 후 프로필 접근 차단 확인 - 인증 무효화 성공"); + + logCompletion("일반 사용자 로그아웃 플로우"); + + } catch (org.springframework.web.client.HttpClientErrorException.NotFound ex) { + logError("예상된 실패: 로그아웃 API가 구현되지 않음 (404 Not Found"); + logError("에러 메시지 : " + ex.getMessage()); + + fail( + "로그아웃 API (/v0/auth/logout)가 구현되지 않았습니다. " + + "다음 단계에서 API를 구현해야 합니다. 
에러: " + + ex.getMessage()); + } catch (Exception ex) { + logError("예상치 못한 오류 발생: " + ex.getClass().getSimpleName()); + logError("에러 메시지: " + ex.getMessage()); + + // 기타 예상치 못한 에러도 기록 + fail("로그아웃 API 호출 중 예상치 못한 오류 발생: " + ex.getMessage()); + } + } + + /** 일반 사용자 로그인을 수행하는 헬퍼 메서드 관리자가 아닌 콘텐츠팀장으로 로그인 */ + private void performRegularUserLogin() { + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "viral.jung@icebang.site"); + loginRequest.put("password", "qwer1234!A"); // 실제 비밀번호 확인 필요 + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.set("Origin", "https://admin.icebang.site"); + headers.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> entity = new HttpEntity<>(loginRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class); + + if (response.getStatusCode() != HttpStatus.OK) { + logError("일반 사용자 로그인 실패: " + response.getStatusCode()); + throw new RuntimeException("Regular user login failed for logout test"); + } + + logSuccess("일반 사용자 로그인 완료 (로그아웃 테스트용)"); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java index 5c538105..4fe3b00d 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java @@ -13,6 +13,7 @@ import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.springframework.http.*; +import org.springframework.mock.web.MockHttpSession; import org.springframework.restdocs.payload.JsonFieldType; import org.springframework.test.context.jdbc.Sql; import org.springframework.transaction.annotation.Transactional; @@ -79,4 +80,62 @@ void login_success() throws Exception { 
.description("HTTP 상태")) .build()))); } + + @Test + @DisplayName("사용자 로그아웃 성공") + void logout_success() throws Exception { + // given - 먼저 로그인 + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + MockHttpSession session = new MockHttpSession(); + + // 로그인 먼저 수행 + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/login")) + .contentType(MediaType.APPLICATION_JSON) + .session(session) + .content(objectMapper.writeValueAsString(loginRequest))) + .andExpect(status().isOk()); + + // when & then - 로그아웃 수행 + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/logout")) + .contentType(MediaType.APPLICATION_JSON) + .session(session) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isEmpty()) + .andDo( + document( + "auth-logout", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("사용자 로그아웃") + .description("현재 인증된 사용자의 세션을 무효화합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data") + .type(JsonFieldType.NULL) + .description("응답 데이터 (로그아웃 성공 시 null)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } } From e36e407f7bc826ccfe157314624a249c87306ad8 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Mon, 15 Sep 2025 18:26:12 +0900 Subject: [PATCH 18/57] =?UTF-8?q?Grafana=20Loki=20=EB=A1=9C=EA=B9=85=20(de?= =?UTF-8?q?velop)=20(#97)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit * chore: Develop logging appender 복구 * chore: Trace id, span id 도입 * chore: Source id, run id label로 분리 --- apps/user-service/build.gradle | 4 + .../icebang/global/filter/LoggingFilter.java | 44 ------- .../main/resources/application-develop.yml | 6 + .../src/main/resources/log4j2-develop.yml | 120 ++++++++++++++++-- .../src/main/resources/log4j2-production.yml | 2 +- 5 files changed, 119 insertions(+), 57 deletions(-) delete mode 100644 apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index d2ffcb1e..8aa7715a 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -63,6 +63,10 @@ dependencies { // 비동기 로깅 implementation 'com.lmax:disruptor:3.4.4' + implementation "io.micrometer:micrometer-tracing-bridge-brave" + implementation "io.micrometer:micrometer-tracing" + implementation "org.springframework.boot:spring-boot-starter-actuator" + // Lombok compileOnly 'org.projectlombok:lombok:1.18.30' annotationProcessor 'org.projectlombok:lombok:1.18.30' diff --git a/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java b/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java deleted file mode 100644 index 0a782839..00000000 --- a/apps/user-service/src/main/java/site/icebang/global/filter/LoggingFilter.java +++ /dev/null @@ -1,44 +0,0 @@ -package site.icebang.global.filter; - -import java.io.IOException; -import java.util.UUID; - -import org.slf4j.MDC; -import org.springframework.stereotype.Component; -import org.springframework.web.filter.OncePerRequestFilter; - -import jakarta.servlet.FilterChain; -import jakarta.servlet.ServletException; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; - -@Component -public class LoggingFilter extends OncePerRequestFilter { - - public static final String TRACE_ID_HEADER = "X-Request-ID"; - - @Override - 
protected void doFilterInternal( - HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) - throws ServletException, IOException { - - // 다른 시스템에서 이미 전달한 Trace ID가 있는지 확인 - String traceId = request.getHeader(TRACE_ID_HEADER); - - // 없다면 새로 생성 (요청의 시작점) - if (traceId == null || traceId.isEmpty()) { - traceId = UUID.randomUUID().toString(); - } - - // MDC.put("traceId", traceId.substring(0, 8)); - MDC.put("traceId", traceId); - - // ⭐️ 요청 객체에 attribute로 traceId를 저장하여 컨트롤러 등에서 사용할 수 있게 함 - request.setAttribute("X-Request-ID", traceId); - - // 응답 헤더에 traceId를 넣어주면 클라이언트가 추적하기 용이 - response.setHeader(TRACE_ID_HEADER, traceId); - - filterChain.doFilter(request, response); - } -} diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index b8cb1648..8c24f49d 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -44,3 +44,9 @@ mybatis: logging: config: classpath:log4j2-develop.yml + +management: + tracing: + enabled: true + sampling: + probability: 1.0 # 100% 샘플링 (개발 환경에서만 권장) \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index 5303e6ff..69833c98 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -5,46 +5,142 @@ Configuration: properties: property: - name: "app-name" - value: "${env:APP_NAME:-user-service-app}" + value: "user-service" + - name: "log-path" + value: "./logs" + - name: "charset-UTF-8" + value: "UTF-8" + # DEBUG 환경용 콘솔 패턴 - 더 간단하고 가독성 좋게 + - name: "console-layout-pattern" + value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss} [%t] %n %logger{20} - %msg%n%n " + # 파일용 패턴 + - name: "file-layout-pattern" + value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] 
%-5level %logger{36} - %msg%n" + # 개발 환경용 로그 파일들 + - name: "dev-log" + value: ${log-path}/develop/app.log + - name: "error-log" + value: ${log-path}/develop/error.log Appenders: - # 콘솔 appender + # 콘솔 출력 - 개발 시 주요 출력 Console: name: console-appender target: SYSTEM_OUT PatternLayout: - pattern: "[%-5level] [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %msg%n" + pattern: ${console-layout-pattern} + disableAnsi: false - # Tjahzi Loki Appender (올바른 문법) + # Loki Appender - 개발환경 모니터링용 Loki: name: loki-appender host: localhost port: 3100 - PatternLayout: - pattern: "[%-5level] [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %msg%n" + JsonLayout: + compact: true + eventEol: true + includeStacktrace: true + KeyValuePair: + - key: "app" + value: "${app-name}" + - key: "env" + value: "develop" Label: - name: "app" - value: "${app-name}" # 고정 값 - - name: "service" - value: "user-service" # 고정 값 + value: "${app-name}" + - name: "env" + value: "develop" - name: "traceId" - value: "${ctx:traceId}" # MDC에서 가져올 값만 넣음 + value: "${ctx:traceId}" + - name: "spanId" + value: "${ctx:spanId}" + - name: "executionType" + value: "${ctx:executionType:-application}" + - name: "sourceId" + value: "${ctx:sourceId}" + - name: "runId" + value: "${ctx:runId}" + + # 개발용 일반 로그 파일 + File: + - name: file-dev-appender + fileName: ${dev-log} + PatternLayout: + pattern: ${file-layout-pattern} + - name: file-error-appender + fileName: ${error-log} + PatternLayout: + pattern: ${file-layout-pattern} + ThresholdFilter: + level: ERROR Loggers: + # Root 로거 - 개발환경에서는 기본적으로 INFO 레벨 Root: level: INFO AppenderRef: - ref: console-appender Logger: - # 애플리케이션 로그만 Loki로 전송 (additivity 문법 수정) + # 애플리케이션 로그 - 개발 시 모든 레벨 + Loki 전송 - name: site.icebang + additivity: false + level: DEBUG + AppenderRef: + - ref: console-appender + - ref: loki-appender + - ref: file-dev-appender + - ref: file-error-appender + + # Spring Framework - 개발 시 필요한 정보만 + - name: org.springframework additivity: false level: INFO AppenderRef: - ref: console-appender + - 
ref: file-dev-appender + + # Spring Security - 인증 디버깅용 + - name: org.springframework.security + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + - ref: file-dev-appender + - ref: loki-appender + + # 웹 요청 로그 - API 개발 시 유용 + - name: org.springframework.web + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + - ref: file-dev-appender + - ref: loki-appender + + # 트랜잭션 로그 - DB 작업 디버깅 + - name: org.springframework.transaction + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + - ref: file-dev-appender - ref: loki-appender # HikariCP 로그 비활성화 - name: com.zaxxer.hikari - level: OFF \ No newline at end of file + level: OFF + + # SQL 로그 - 개발 시 쿼리 확인용 (필요시 활성화) + - name: org.hibernate.SQL + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + + # 파라미터 바인딩 로그 (필요시 활성화) + - name: org.hibernate.type.descriptor.sql.BasicBinder + level: TRACE + additivity: false + AppenderRef: + - ref: console-appender \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-production.yml b/apps/user-service/src/main/resources/log4j2-production.yml index 31393458..2e7d282b 100644 --- a/apps/user-service/src/main/resources/log4j2-production.yml +++ b/apps/user-service/src/main/resources/log4j2-production.yml @@ -1,5 +1,5 @@ Configuration: - name: develop + name: production properties: property: From 71f3b5be1b6b70ff677bc78e77093e1d6ffe9203 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Mon, 15 Sep 2025 18:37:32 +0900 Subject: [PATCH 19/57] =?UTF-8?q?Pagination=20dto=20=EB=B0=8F=20=EA=B3=B5?= =?UTF-8?q?=ED=86=B5=20service=20(#94)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Paging request, reponse dto * chore: Pagination controller, service example * chore: workflows api 앞에 v0 추가 * refactor: 페이지네이션 구조 개선 PageService를 PageableService 인터페이스로 변경하여 트랜잭션 처리 개선 --- .../site/icebang/common/dto/PageParams.java | 25 ++++++ 
.../site/icebang/common/dto/PageResult.java | 77 +++++++++++++++++++ .../common/service/PageableService.java | 8 ++ .../controller/WorkflowController.java | 28 +++++++ .../domain/workflow/dto/WorkflowCardDto.java | 6 ++ .../workflow/service/WorkflowService.java | 27 +++++++ 6 files changed, 171 insertions(+) create mode 100644 apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java create mode 100644 apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java create mode 100644 apps/user-service/src/main/java/site/icebang/common/service/PageableService.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java diff --git a/apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java b/apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java new file mode 100644 index 00000000..5f2f0d30 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/common/dto/PageParams.java @@ -0,0 +1,25 @@ +package site.icebang.common.dto; + +import lombok.Data; + +@Data +public class PageParams { + private int current = 1; + private int pageSize = 10; + private String search; + private String[] sorters; + private String[] filters; + + // 계산된 offset + public int getOffset() { + return (current - 1) * pageSize; + } + + public boolean hasSearch() { + return search != null && !search.trim().isEmpty(); + } + + public boolean hasSorters() { + return sorters != null && sorters.length > 0; + } +} diff --git a/apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java b/apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java new file mode 100644 index 00000000..4a2a8bfa --- /dev/null +++ 
b/apps/user-service/src/main/java/site/icebang/common/dto/PageResult.java @@ -0,0 +1,77 @@ +package site.icebang.common.dto; + +import java.util.List; +import java.util.function.Supplier; + +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +public class PageResult { + private List data; + private int total; + private int current; + private int pageSize; + private int totalPages; + private boolean hasNext; + private boolean hasPrevious; + + public PageResult(List data, int total, int current, int pageSize) { + this.data = data; + this.total = total; + this.current = current; + this.pageSize = pageSize; + calculatePagination(); + } + + // 페이징 계산 로직 분리 + private void calculatePagination() { + this.totalPages = total > 0 ? (int) Math.ceil((double) total / pageSize) : 0; + this.hasNext = current < totalPages; + this.hasPrevious = current > 1; + } + + // 기존 of 메서드 + public static PageResult of(List data, int total, int current, int pageSize) { + return new PageResult<>(data, total, current, pageSize); + } + + // PageParams를 받는 of 메서드 + public static PageResult of(List data, int total, PageParams pageParams) { + return new PageResult<>(data, total, pageParams.getCurrent(), pageParams.getPageSize()); + } + + // 함수형 인터페이스를 활용한 from 메서드 (트랜잭션 내에서 실행) + public static PageResult from( + PageParams pageParams, Supplier> dataSupplier, Supplier countSupplier) { + List data = dataSupplier.get(); + int total = countSupplier.get(); + return new PageResult<>(data, total, pageParams.getCurrent(), pageParams.getPageSize()); + } + + // 빈 페이지 결과 생성 + public static PageResult empty(PageParams pageParams) { + return new PageResult<>(List.of(), 0, pageParams.getCurrent(), pageParams.getPageSize()); + } + + // 빈 페이지 결과 생성 (기본값) + public static PageResult empty() { + return new PageResult<>(List.of(), 0, 1, 10); + } + + // 데이터가 있는지 확인 + public boolean hasData() { + return data != null && !data.isEmpty(); + } + + // 첫 번째 페이지인지 확인 + public boolean isFirstPage() 
{ + return current == 1; + } + + // 마지막 페이지인지 확인 + public boolean isLastPage() { + return current == totalPages; + } +} diff --git a/apps/user-service/src/main/java/site/icebang/common/service/PageableService.java b/apps/user-service/src/main/java/site/icebang/common/service/PageableService.java new file mode 100644 index 00000000..25d41d29 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/common/service/PageableService.java @@ -0,0 +1,8 @@ +package site.icebang.common.service; + +import site.icebang.common.dto.PageParams; +import site.icebang.common.dto.PageResult; + +public interface PageableService { + PageResult getPagedResult(PageParams pageParams); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java new file mode 100644 index 00000000..39077eca --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java @@ -0,0 +1,28 @@ +package site.icebang.domain.workflow.controller; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.ModelAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import lombok.RequiredArgsConstructor; + +import site.icebang.common.dto.ApiResponse; +import site.icebang.common.dto.PageParams; +import site.icebang.common.dto.PageResult; +import site.icebang.domain.workflow.dto.WorkflowCardDto; +import site.icebang.domain.workflow.service.WorkflowService; + +@RestController +@RequestMapping("/v0/workflows") +@RequiredArgsConstructor +public class WorkflowController { + private final WorkflowService workflowService; + + @GetMapping("") + public ApiResponse> getWorkflowList( + @ModelAttribute PageParams pageParams) { + PageResult result = workflowService.getPagedResult(pageParams); 
+ return ApiResponse.success(result); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java new file mode 100644 index 00000000..b54a29c0 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java @@ -0,0 +1,6 @@ +package site.icebang.domain.workflow.dto; + +import lombok.Data; + +@Data +public class WorkflowCardDto {} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java new file mode 100644 index 00000000..1a358924 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java @@ -0,0 +1,27 @@ +package site.icebang.domain.workflow.service; + +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import lombok.RequiredArgsConstructor; + +import site.icebang.common.dto.PageParams; +import site.icebang.common.dto.PageResult; +import site.icebang.common.service.PageableService; +import site.icebang.domain.workflow.dto.WorkflowCardDto; + +@Service +@RequiredArgsConstructor +public class WorkflowService implements PageableService { + + @Override + @Transactional(readOnly = true) + public PageResult getPagedResult(PageParams pageParams) { + throw new RuntimeException("Not implemented"); + // return PageResult.from( + // pageParams, + // () -> workflowMapper.selectWorkflowList(pageParams), + // () -> workflowMapper.selectWorkflowCount(pageParams) + // ); + } +} From d766df4b12478336b88d5340e9fc9cc0e4336da9 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Mon, 15 Sep 2025 18:55:59 +0900 Subject: [PATCH 20/57] =?UTF-8?q?feat:=20crawling=5Futil=20=ED=95=9C?= =?UTF-8?q?=EA=B0=9C=EB=A1=9C=20=ED=95=A9=EC=B9=A8=201.=20service=20?= 
=?UTF-8?q?=EC=88=98=EC=A0=95=202.=20schemas=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/api/endpoints/blog.py | 6 +- .../app/api/endpoints/keywords.py | 6 +- .../app/api/endpoints/product.py | 45 ++--- apps/pre-processing-service/app/api/router.py | 1 - .../service/blog/base_blog_post_service.py | 65 +++---- .../service/blog/blogger_blog_post_adapter.py | 82 ++++++++ .../service/blog/blogger_blog_post_service.py | 93 ++++----- .../app/service/crawl_service.py | 5 +- .../app/service/crawlers/__init__.py | 0 .../app/service/crawlers/base_crawler.py | 56 ++++++ .../crawlers/detail_crawler.py} | 182 +----------------- .../app/service/crawlers/search_crawler.py | 136 +++++++++++++ .../app/service/search_service.py | 2 +- .../app/service/similarity_service.py | 12 -- .../app/test/test_keyword.py | 11 -- .../app/test/test_match_service.py | 10 - .../app/test/test_sadagu_crawl.py | 11 +- .../app/test/test_search_service.py | 8 +- .../app/test/test_similarity_service.py | 13 -- .../app/utils/crawling_util.py | 77 ++++---- 20 files changed, 420 insertions(+), 401 deletions(-) create mode 100644 apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py create mode 100644 apps/pre-processing-service/app/service/crawlers/__init__.py create mode 100644 apps/pre-processing-service/app/service/crawlers/base_crawler.py rename apps/pre-processing-service/app/{utils/crawler_utils.py => service/crawlers/detail_crawler.py} (55%) create mode 100644 apps/pre-processing-service/app/service/crawlers/search_crawler.py diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 138fb706..158faf20 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -4,7 +4,7 @@ from ...model.schemas import * from app.service.blog.tistory_blog_post_service import 
TistoryBlogPostService from app.service.blog.naver_blog_post_service import NaverBlogPostService -from ...service.blog.blogger_blog_post_service import BloggerBlogPostService +from ...service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter # 수정된 import router = APIRouter() @@ -62,7 +62,7 @@ async def publish(request: RequestBlogPublish): return ResponseBlogPublish(status="success", metadata=result) elif request.tag == "blogger": - blogger_service = BloggerBlogPostService() + blogger_service = BloggerBlogPostAdapter() # 수정: Adapter 사용 result = blogger_service.post_content( title=request.post_title, content=request.post_content, @@ -74,4 +74,4 @@ async def publish(request: RequestBlogPublish): "블로거 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish(status="success", metadata=result) + return ResponseBlogPublish(status="success", metadata=result) \ No newline at end of file diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index 92c8a66b..43c0049b 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -5,6 +5,9 @@ router = APIRouter() +@router.get("/") +async def root(): + return {"message": "keyword API"} @router.post( "/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색" @@ -15,9 +18,6 @@ async def search(request: RequestNaverSearch): 요청 예시: { - "job_id": 1, - "schedule_id": 1, - "schedule_his_id": 1, "tag": "naver", "category": "50000000", "start_date": "2025-09-01", diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index 95b983e4..ab309595 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -8,6 +8,7 @@ from ...service.crawl_service import CrawlService from ...service.search_service import 
SearchService from ...service.match_service import MatchService +from ...service.similarity_service import SimilarityService # from ...service.similarity_service import SimilarityService @@ -54,33 +55,33 @@ async def match(request: RequestSadaguMatch): raise HTTPException(status_code=500, detail=str(e)) -# @router.post( -# "/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석" -# ) -# async def similarity(request: RequestSadaguSimilarity): -# """ -# 매칭된 상품들 중 키워드와의 유사도를 계산하여 최적의 상품을 선택합니다. -# """ -# try: -# similarity_service = SimilarityService() -# result = similarity_service.select_product_by_similarity(request) -# -# if not result: -# raise CustomException( -# 500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED" -# ) -# -# return result -# except InvalidItemDataException as e: -# raise HTTPException(status_code=e.status_code, detail=e.detail) -# except Exception as e: -# raise HTTPException(status_code=500, detail=str(e)) +@router.post( + "/similarity", response_model=ResponseSadaguSimilarity, summary="상품 유사도 분석" +) +async def similarity(request: RequestSadaguSimilarity): + """ + 매칭된 상품들 중 키워드와의 유사도를 계산하여 최적의 상품을 선택합니다. + """ + try: + similarity_service = SimilarityService() + result = similarity_service.select_product_by_similarity(request) + + if not result: + raise CustomException( + 500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED" + ) + + return result + except InvalidItemDataException as e: + raise HTTPException(status_code=e.status_code, detail=e.detail) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) @router.post( "/crawl", response_model=ResponseSadaguCrawl, summary="상품 상세 정보 크롤링" ) -async def crawl(request: Request, body: RequestSadaguCrawl): +async def crawl(body: RequestSadaguCrawl): """ 상품 상세 페이지를 크롤링하여 상세 정보를 수집합니다. 
""" diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py index 99286cf6..b180c97e 100644 --- a/apps/pre-processing-service/app/api/router.py +++ b/apps/pre-processing-service/app/api/router.py @@ -17,7 +17,6 @@ # 모듈 테스터를 위한 endpoint -> 추후 삭제 예정 api_router.include_router(test.router, prefix="/tests", tags=["Test"]) - @api_router.get("/ping") async def root(): return {"message": "서버 실행중입니다."} diff --git a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py index ff4b2754..d6d6989b 100644 --- a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py @@ -1,61 +1,48 @@ from abc import ABC, abstractmethod -from typing import Dict, List, Optional +from typing import Dict from app.utils.crawling_util import CrawlingUtil from app.errors.BlogPostingException import * from app.errors.CrawlingException import * - class BaseBlogPostService(ABC): """ 블로그 포스팅 서비스 추상 클래스 """ - def __init__(self, config_file="blog_config.json"): - """공통 초기화 로직""" - # Selenium 기반 서비스를 위한 초기화 - if self._requires_webdriver(): + def __init__(self, use_webdriver=True): + """ + 공통 초기화 로직 + :param use_webdriver: 웹드라이버 사용 여부 (API 서비스의 경우 False) + """ + self.use_webdriver = use_webdriver + + if self.use_webdriver: try: - self.crawling_service = CrawlingUtil() + # 블로그 포스팅용 설정으로 초기화 + self.crawling_service = CrawlingUtil( + headless=False, # 네이버 탐지 우회를 위해 headless 비활성화 + for_blog_posting=True + ) self.web_driver = self.crawling_service.get_driver() self.wait_driver = self.crawling_service.get_wait() except Exception: raise WebDriverConnectionException() else: - # API 기반 서비스의 경우 WebDriver가 필요 없음 self.crawling_service = None self.web_driver = None self.wait_driver = None - # API 기반 서비스를 위한 초기화 - self.config_file = config_file - self.config = {} - self.current_upload_account = 
None - - # API 관련 속성들 (사용하지 않는 서비스에서는 None으로 유지) - self.blogger_service = None - self.blog_id = None - self.scopes = None - self._load_config() - def _requires_webdriver(self) -> bool: - """ - 서브클래스에서 WebDriver가 필요한지 여부를 반환 - 기본값은 True (Selenium 기반), API 기반 서비스에서는 False로 오버라이드 - """ - return True - @abstractmethod def _load_config(self) -> None: """플랫폼별 설정 로드""" pass + @abstractmethod def _login(self) -> None: - """ - 플랫폼별 로그인 구현 (API 기반 서비스의 경우 인증으로 대체) - 기본 구현은 아무것도 하지 않음 (API 서비스용) - """ + """플랫폼별 로그인 구현""" pass @abstractmethod @@ -74,15 +61,21 @@ def _get_platform_name(self) -> str: pass @abstractmethod - def _validate_content( - self, title: str, content: str, tags: Optional[List[str]] = None - ) -> None: + def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: """ 공통 유효성 검사 로직 :param title: 포스트 제목 :param content: 포스트 내용 :param tags: 포스트 태그 리스트 """ + # if not title or not title.strip(): + # raise BlogContentValidationException("title", "제목이 비어있습니다") + # + # if not content or not content.strip(): + # raise BlogContentValidationException("content", "내용이 비어있습니다") + # + # if tags is None: + # raise BlogContentValidationException("tags", "태그가 비어있습니다") pass def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict: @@ -96,7 +89,7 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict # 1. 콘텐츠 유효성 검사 self._validate_content(title, content, tags) - # 2. 로그인 (Selenium 기반) 또는 인증 (API 기반) + # 2. 로그인 self._login() # 3. 
포스트 작성 및 발행 @@ -107,10 +100,10 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict "platform": self._get_platform_name(), "title": title, "content_length": len(content), - "tags": tags or [], + "tags": tags or [] } def __del__(self): """공통 리소스 정리""" - if hasattr(self, "web_driver") and self.web_driver: - self.web_driver.quit() + if hasattr(self, 'web_driver') and self.web_driver: + self.web_driver.quit() \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py new file mode 100644 index 00000000..1daba4af --- /dev/null +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py @@ -0,0 +1,82 @@ +from typing import Dict, List, Optional +from app.service.blog.base_blog_post_service import BaseBlogPostService +from app.service.blog.blogger_blog_post_service import BloggerApiService +from app.errors.BlogPostingException import * + + +class BloggerBlogPostAdapter(BaseBlogPostService): + """ + BaseBlogPostService와 호환되도록 BloggerApiService를 감싼 어댑터 + 현재 BaseBlogPostService 인터페이스와 호환 + """ + + def __init__(self, config_file="blog_config.json"): + # API 전용 서비스 (Adaptee) 먼저 초기화 + self.api_service = BloggerApiService(config_file=config_file) + + try: + # 부모 클래스의 웹드라이버 초기화를 시도하지만, 실패해도 무시 + # 이렇게 하면 부모의 다른 초기화 로직은 실행됨 + super().__init__() + except Exception: + # 웹드라이버 초기화 실패 시 API 서비스용으로 속성 설정 + self.crawling_service = None + self.web_driver = None + self.wait_driver = None + # 설정 로드는 직접 호출 + self._load_config() + + def _load_config(self) -> None: + """ + BloggerApiService 내부에서 이미 처리되므로 별도 구현 불필요 + """ + # API 서비스의 설정이 이미 로드되었으므로 추가 작업 없음 + pass + + def _login(self) -> None: + """ + Selenium 로그인과 달리, OAuth 인증으로 대체 + """ + try: + self.api_service.authenticate_with_google_oauth() + except Exception as e: + raise BlogLoginException("Blogger", f"OAuth 인증 실패: {str(e)}") + + def _write_content(self, 
title: str, content: str, tags: List[str] = None) -> None: + """ + API를 통한 포스트 작성 + """ + try: + result = self.api_service.create_post_via_api(title, content, labels=tags) + # 결과 로깅 + print(f"포스트 생성 완료: {result.get('published_url', 'URL 없음')}") + except Exception as e: + raise BlogPostPublishException("Blogger", f"포스트 작성 실패: {str(e)}") + + def _get_platform_name(self) -> str: + """플랫폼 이름 반환""" + return "Blogger" + + def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + """ + API 전용 유효성 검사 호출 + """ + try: + # Optional을 List로 변환 (None인 경우 빈 리스트) + tags_list = tags if tags is not None else [] + self.api_service.validate_api_content(title, content, labels=tags_list) + except Exception as e: + # BloggerApiService의 예외를 BaseBlogPostService 호환 예외로 변환 + if "title" in str(e).lower(): + raise BlogContentValidationException("title", str(e)) + elif "content" in str(e).lower(): + raise BlogContentValidationException("content", str(e)) + else: + raise BlogContentValidationException("general", str(e)) + + def __del__(self): + """ + API 서비스이므로 웹드라이버 정리가 불필요 + """ + # 웹드라이버가 없으므로 정리할 것이 없음 + pass \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py index 07e337d9..86de82a6 100644 --- a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py @@ -1,39 +1,32 @@ import json import os import pickle -from typing import Dict, List, Optional - from googleapiclient.discovery import build from google.auth.transport.requests import Request from google_auth_oauthlib.flow import InstalledAppFlow - from app.errors.BlogPostingException import * -from app.service.blog.base_blog_post_service import BaseBlogPostService +from typing import Dict -class BloggerBlogPostService(BaseBlogPostService): +class BloggerApiService: """ 
- Blogger API를 사용하여 포스팅을 관리하는 서비스 + 호환되지 않는 Blogger API 서비스 (Adaptee) + 완전히 다른 초기화/인증 방식을 사용 """ def __init__(self, config_file="blog_config.json"): - # 부모 클래스 생성자 호출 (WebDriver는 None으로 설정됨) - super().__init__() - - # API 관련 추가 초기화 self.config_file = config_file + self.config = {} + self.current_upload_account = None self.blogger_service = None self.blog_id = None self.scopes = ["https://www.googleapis.com/auth/blogger"] + self.authenticated = False - def _requires_webdriver(self) -> bool: - """API 기반 서비스는 WebDriver가 필요하지 않음""" - return False + self._load_api_config() - def _load_config(self) -> None: - """ - 플랫폼별 설정 로드 - """ + def _load_api_config(self) -> None: + """API 전용 설정 로드""" try: with open(self.config_file, "r", encoding="utf-8") as f: self.config = json.load(f) @@ -48,16 +41,11 @@ def _load_config(self) -> None: self.config = default_config self.current_upload_account = self.config["upload_account"] - def _login(self) -> None: - """ - API 인증 (Selenium의 로그인을 대체) - """ - self._authenticate_api() + def authenticate_with_google_oauth(self) -> bool: + """Google OAuth 인증 (Selenium 로그인과 완전히 다름)""" + if self.authenticated: + return True - def _authenticate_api(self): - """ - API 인증 및 서비스 객체 생성 - """ token_file = f"token_{self.current_upload_account.replace('@', '_').replace('.', '_')}.pkl" try: @@ -85,22 +73,24 @@ def _authenticate_api(self): if blogs.get("items"): self.blog_id = blogs["items"][0]["id"] print(f"API 설정 완료 - 블로그: {blogs['items'][0]['name']}") + self.authenticated = True return True else: - print("블로그를 찾을 수 없습니다.") - return False + raise BloggerApiException("블로그를 찾을 수 없습니다") + except Exception as e: - print(f"API 인증/설정 실패: {e}") raise BloggerApiException("API 인증 실패", e) - def _write_content(self, title: str, content: str, tags: List[str] = None) -> None: - """ - API를 사용하여 포스팅 작성 - """ - if not self.blogger_service or not self.blog_id: - self._authenticate_api() + def create_post_via_api(self, title: str, content: str, labels: List[str] = None) -> 
Dict: + """API를 통한 포스트 생성 (Selenium write_content와 완전히 다름)""" + if not self.authenticated: + self.authenticate_with_google_oauth() - post_data = {"title": title, "content": content, "labels": tags or []} + post_data = { + "title": title, + "content": content, + "labels": labels or [] + } try: result = ( @@ -109,35 +99,20 @@ def _write_content(self, title: str, content: str, tags: List[str] = None) -> No .execute() ) - print(f"포스트 생성 완료: {result.get('url')}") + return { + "blogger_post_id": result.get("id"), + "published_url": result.get("url"), + "status": "published" + } except Exception as e: raise BlogPostPublishException( platform="Blogger", reason="API 통신 중 오류가 발생했습니다." ) from e - def _get_platform_name(self) -> str: - """플랫폼 이름 반환""" - return "Blogger" - - def _validate_content( - self, title: str, content: str, tags: Optional[List[str]] = None - ) -> None: - """ - 공통 유효성 검사 로직 - """ + def validate_api_content(self, title: str, content: str, labels: List[str] = None) -> None: + """API 전용 유효성 검사""" if not title or not title.strip(): raise BlogContentValidationException("title", "제목이 비어있습니다") - if not content or not content.strip(): raise BlogContentValidationException("content", "내용이 비어있습니다") - - # 태그 유효성 검사도 필요에 따라 추가 - # if not tags or not isinstance(tags, list): - # raise BlogContentValidationException("tags", "태그는 리스트 형태여야 합니다") - - def __del__(self): - """ - 리소스 정리 - API 기반 서비스는 별도 정리 불필요 - 부모 클래스의 __del__이 WebDriver 정리를 처리 - """ - super().__del__() + # Blogger는 태그가 선택사항 \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 4122bb2e..8543658e 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -1,5 +1,5 @@ import time -from app.utils.crawler_utils import DetailCrawler +from app.service.crawlers.detail_crawler import DetailCrawler from app.errors.CustomException 
import InvalidItemDataException from app.model.schemas import RequestSadaguCrawl from loguru import logger @@ -37,9 +37,6 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: # 응답 데이터 구성 response_data = { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, "tag": request.tag, "product_url": str(request.product_url), "product_detail": product_detail, diff --git a/apps/pre-processing-service/app/service/crawlers/__init__.py b/apps/pre-processing-service/app/service/crawlers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/pre-processing-service/app/service/crawlers/base_crawler.py b/apps/pre-processing-service/app/service/crawlers/base_crawler.py new file mode 100644 index 00000000..dc495843 --- /dev/null +++ b/apps/pre-processing-service/app/service/crawlers/base_crawler.py @@ -0,0 +1,56 @@ +import httpx +import time +from abc import ABC, abstractmethod +from bs4 import BeautifulSoup +from loguru import logger +from app.utils.crawling_util import CrawlingUtil + + +class BaseCrawler(ABC): + """크롤러 기본 클래스""" + + def __init__(self, use_selenium: bool = True, headless: bool = True): + self.base_url = "https://ssadagu.kr" + self.use_selenium = use_selenium + + if use_selenium: + self._setup_selenium(headless) + else: + self._setup_httpx() + + def _setup_selenium(self, headless: bool): + """Selenium WebDriver 초기화""" + try: + self.crawling_util = CrawlingUtil(headless=headless) + self.driver = self.crawling_util.get_driver() + self.wait = self.crawling_util.get_wait() + logger.info("Selenium WebDriver 초기화 완료") + except Exception as e: + logger.warning(f"Selenium 초기화 실패, httpx로 대체: {e}") + self.use_selenium = False + self._setup_httpx() + + def _setup_httpx(self): + """httpx 클라이언트 초기화""" + self.client = httpx.AsyncClient( + headers={ + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" + 
}, + timeout=30.0, + ) + logger.info("httpx 클라이언트 초기화 완료") + + async def close(self): + """리소스 정리""" + if self.use_selenium and hasattr(self, 'crawling_util'): + try: + self.crawling_util.close() + logger.info("Selenium WebDriver 종료 완료") + except Exception as e: + logger.warning(f"Selenium WebDriver 종료 중 오류: {e}") + elif hasattr(self, 'client'): + try: + await self.client.aclose() + logger.info("httpx 클라이언트 종료 완료") + except Exception as e: + logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") \ No newline at end of file diff --git a/apps/pre-processing-service/app/utils/crawler_utils.py b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py similarity index 55% rename from apps/pre-processing-service/app/utils/crawler_utils.py rename to apps/pre-processing-service/app/service/crawlers/detail_crawler.py index 5c593b9f..83829f5a 100644 --- a/apps/pre-processing-service/app/utils/crawler_utils.py +++ b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py @@ -1,185 +1,9 @@ -import urllib.parse -import httpx -import re import time +import re from bs4 import BeautifulSoup -from selenium import webdriver -from selenium.webdriver.chrome.options import Options -from selenium.webdriver.common.by import By -from selenium.webdriver.support.ui import WebDriverWait -from selenium.common.exceptions import TimeoutException, NoSuchElementException +from .search_crawler import SearchCrawler from loguru import logger - -class SearchCrawler: - def __init__(self, use_selenium=True): - self.base_url = "https://ssadagu.kr" - self.use_selenium = use_selenium - - if use_selenium: - self._setup_selenium() - else: - self._setup_httpx() - - def _setup_selenium(self): - """Selenium WebDriver 초기화""" - chrome_options = Options() - chrome_options.add_argument("--headless") - chrome_options.add_argument("--no-sandbox") - chrome_options.add_argument("--disable-dev-shm-usage") - chrome_options.add_argument("--disable-gpu") - chrome_options.add_argument("--window-size=1920,1080") 
- chrome_options.add_argument( - "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" - ) - - try: - self.driver = webdriver.Chrome(options=chrome_options) - self.wait = WebDriverWait(self.driver, 10) - logger.info("Selenium WebDriver 초기화 완료") - except Exception as e: - logger.warning(f"Selenium 초기화 실패, httpx로 대체: {e}") - self.use_selenium = False - self._setup_httpx() - - def _setup_httpx(self): - """httpx 클라이언트 초기화""" - self.client = httpx.AsyncClient( - headers={ - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" - }, - timeout=30.0, - ) - logger.info("httpx 클라이언트 초기화 완료") - - async def search_products_selenium(self, keyword: str) -> list[dict]: - """Selenium을 사용한 상품 검색""" - encoded_keyword = urllib.parse.quote(keyword) - search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" - - try: - logger.info( - f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'" - ) - self.driver.get(search_url) - time.sleep(5) - - product_links = [] - link_elements = self.driver.find_elements(By.TAG_NAME, "a") - - for element in link_elements: - href = element.get_attribute("href") - if ( - href - and "view.php" in href - and ("platform=1688" in href or "num_iid" in href) - ): - try: - title = element.get_attribute("title") or element.text.strip() - if title: - product_links.append({"url": href, "title": title}) - except: - product_links.append({"url": href, "title": "Unknown Title"}) - - # 중복 제거 - seen_urls = set() - unique_products = [] - for product in product_links: - if product["url"] not in seen_urls: - seen_urls.add(product["url"]) - unique_products.append(product) - - logger.info( - f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)" - ) - return unique_products[:20] - - except Exception as e: - logger.error(f"Selenium 검색 오류: keyword='{keyword}', error='{e}'") - return [] - 
- async def search_products_httpx(self, keyword: str) -> list[dict]: - """httpx를 사용한 상품 검색""" - encoded_keyword = urllib.parse.quote(keyword) - search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" - - try: - logger.info( - f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'" - ) - response = await self.client.get(search_url) - response.raise_for_status() - soup = BeautifulSoup(response.content, "html.parser") - - product_links = [] - all_links = soup.find_all("a", href=True) - - for link in all_links: - href = link["href"] - if "view.php" in href and ( - "platform=1688" in href or "num_iid" in href - ): - full_url = ( - f"{self.base_url}{href}" if href.startswith("/") else href - ) - title = ( - link.get("title", "") - or link.get_text(strip=True) - or "Unknown Title" - ) - - product_links.append({"url": full_url, "title": title}) - - logger.info(f"httpx로 발견한 상품 링크: {len(product_links)}개") - return product_links[:20] - - except Exception as e: - logger.error(f"httpx 검색 오류: keyword='{keyword}', error='{e}'") - return [] - - async def get_basic_product_info(self, product_url: str) -> dict: - """기본 상품 정보만 크롤링""" - try: - logger.debug(f"기본 상품 정보 크롤링 시작: url='{product_url}'") - - if self.use_selenium: - self.driver.get(product_url) - self.wait.until( - lambda driver: driver.execute_script("return document.readyState") - == "complete" - ) - soup = BeautifulSoup(self.driver.page_source, "html.parser") - else: - response = await self.client.get(product_url) - response.raise_for_status() - soup = BeautifulSoup(response.content, "html.parser") - - title_element = soup.find("h1", {"id": "kakaotitle"}) - title = title_element.get_text(strip=True) if title_element else "제목 없음" - - logger.debug(f"기본 상품 정보 크롤링 완료: title='{title[:50]}'") - return {"url": product_url, "title": title} - - except Exception as e: - logger.error(f"기본 상품 크롤링 오류: url='{product_url}', error='{e}'") - return None - - async def close(self): - """리소스 정리""" - if self.use_selenium and 
hasattr(self, "driver"): - try: - self.driver.quit() - logger.info("Selenium WebDriver 종료 완료") - except Exception as e: - logger.warning(f"Selenium WebDriver 종료 중 오류: {e}") - elif hasattr(self, "client"): - try: - await self.client.aclose() - logger.info("httpx 클라이언트 종료 완료") - except Exception as e: - logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") - - class DetailCrawler(SearchCrawler): """SearchCrawler를 확장한 상세 크롤링 클래스""" @@ -404,4 +228,4 @@ def _extract_images(self, soup: BeautifulSoup) -> list[str]: logger.debug(f"이미지 URL 추출: {src}") logger.info(f"총 {len(images)}개 이미지 URL 추출 완료") - return images + return images \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/crawlers/search_crawler.py b/apps/pre-processing-service/app/service/crawlers/search_crawler.py new file mode 100644 index 00000000..41610a2d --- /dev/null +++ b/apps/pre-processing-service/app/service/crawlers/search_crawler.py @@ -0,0 +1,136 @@ +import urllib.parse +import time +from .base_crawler import BaseCrawler +from loguru import logger +from bs4 import BeautifulSoup +from selenium.webdriver.common.by import By + +class SearchCrawler(BaseCrawler): + """상품 검색 전용 크롤러""" + + async def search_products_selenium(self, keyword: str) -> list[dict]: + """Selenium을 사용한 상품 검색""" + encoded_keyword = urllib.parse.quote(keyword) + search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" + + try: + logger.info( + f"Selenium 상품 검색 시작: keyword='{keyword}', url='{search_url}'" + ) + self.driver.get(search_url) + time.sleep(5) + + product_links = [] + link_elements = self.driver.find_elements(By.TAG_NAME, "a") + + for element in link_elements: + href = element.get_attribute("href") + if ( + href + and "view.php" in href + and ("platform=1688" in href or "num_iid" in href) + ): + try: + title = element.get_attribute("title") or element.text.strip() + if title: + product_links.append({"url": href, "title": title}) + except: + product_links.append({"url": href, "title": "Unknown 
Title"}) + + # 중복 제거 + seen_urls = set() + unique_products = [] + for product in product_links: + if product["url"] not in seen_urls: + seen_urls.add(product["url"]) + unique_products.append(product) + + logger.info( + f"Selenium으로 발견한 상품 링크: {len(unique_products)}개 (중복 제거 전: {len(product_links)}개)" + ) + return unique_products[:20] + + except Exception as e: + logger.error(f"Selenium 검색 오류: keyword='{keyword}', error='{e}'") + return [] + + async def search_products_httpx(self, keyword: str) -> list[dict]: + """httpx를 사용한 상품 검색""" + encoded_keyword = urllib.parse.quote(keyword) + search_url = f"{self.base_url}/shop/search.php?ss_tx={encoded_keyword}" + + try: + logger.info( + f"httpx 상품 검색 시작: keyword='{keyword}', url='{search_url}'" + ) + response = await self.client.get(search_url) + response.raise_for_status() + soup = BeautifulSoup(response.content, "html.parser") + + product_links = [] + all_links = soup.find_all("a", href=True) + + for link in all_links: + href = link["href"] + if "view.php" in href and ( + "platform=1688" in href or "num_iid" in href + ): + full_url = ( + f"{self.base_url}{href}" if href.startswith("/") else href + ) + title = ( + link.get("title", "") + or link.get_text(strip=True) + or "Unknown Title" + ) + + product_links.append({"url": full_url, "title": title}) + + logger.info(f"httpx로 발견한 상품 링크: {len(product_links)}개") + return product_links[:20] + + except Exception as e: + logger.error(f"httpx 검색 오류: keyword='{keyword}', error='{e}'") + return [] + + async def get_basic_product_info(self, product_url: str) -> dict: + """기본 상품 정보만 크롤링""" + try: + logger.debug(f"기본 상품 정보 크롤링 시작: url='{product_url}'") + + if self.use_selenium: + self.driver.get(product_url) + self.wait.until( + lambda driver: driver.execute_script("return document.readyState") + == "complete" + ) + soup = BeautifulSoup(self.driver.page_source, "html.parser") + else: + response = await self.client.get(product_url) + response.raise_for_status() + soup = 
BeautifulSoup(response.content, "html.parser") + + title_element = soup.find("h1", {"id": "kakaotitle"}) + title = title_element.get_text(strip=True) if title_element else "제목 없음" + + logger.debug(f"기본 상품 정보 크롤링 완료: title='{title[:50]}'") + return {"url": product_url, "title": title} + + except Exception as e: + logger.error(f"기본 상품 크롤링 오류: url='{product_url}', error='{e}'") + return None + + async def close(self): + """리소스 정리""" + if self.use_selenium and hasattr(self, "driver"): + try: + self.driver.quit() + logger.info("Selenium WebDriver 종료 완료") + except Exception as e: + logger.warning(f"Selenium WebDriver 종료 중 오류: {e}") + elif hasattr(self, "client"): + try: + await self.client.aclose() + logger.info("httpx 클라이언트 종료 완료") + except Exception as e: + logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index 4cb1bf99..f7255e61 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -1,4 +1,4 @@ -from app.utils.crawler_utils import SearchCrawler +from app.service.crawlers.search_crawler import SearchCrawler from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSearch from loguru import logger diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index dbd2b762..9015bd94 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -27,9 +27,6 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'" ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, "keyword": keyword, 
"selected_product": None, "reason": "매칭된 상품과 검색 결과가 모두 없음", @@ -87,9 +84,6 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, "keyword": keyword, "selected_product": selected_product, "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})", @@ -124,9 +118,6 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}" ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, "keyword": keyword, "selected_product": None, "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})", @@ -161,9 +152,6 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, "keyword": keyword, "selected_product": selected_product, "reason": reason, diff --git a/apps/pre-processing-service/app/test/test_keyword.py b/apps/pre-processing-service/app/test/test_keyword.py index 2a96796e..095b6607 100644 --- a/apps/pre-processing-service/app/test/test_keyword.py +++ b/apps/pre-processing-service/app/test/test_keyword.py @@ -4,11 +4,6 @@ client = TestClient(app) -JOB_ID = 1 -SCHEDULE_ID = 1 -SCHEDULE_HIS_ID = 1 - - def test_read_root(): response = client.get("/keywords/") assert response.status_code == 200 @@ -26,9 +21,6 @@ def test_read_root(): ) def test_search(tag, category, start_date, end_date): body = { - "job_id": JOB_ID, - "schedule_id": SCHEDULE_ID, - "schedule_his_id": SCHEDULE_HIS_ID, # 오타 수정 "tag": tag, "category": category, "start_date": start_date, @@ -39,9 +31,6 @@ def test_search(tag, category, start_date, end_date): assert response.status_code == 200 response_data = 
response.json() - assert response_data["job_id"] == body["job_id"] - assert response_data["schedule_id"] == body["schedule_id"] - assert response_data["schedule_his_id"] == body["schedule_his_id"] # 오타 수정 assert response_data["status"] == "success" assert "keyword" in response_data assert isinstance(response_data["total_keyword"], dict) diff --git a/apps/pre-processing-service/app/test/test_match_service.py b/apps/pre-processing-service/app/test/test_match_service.py index 7750cd3d..3f50ffad 100644 --- a/apps/pre-processing-service/app/test/test_match_service.py +++ b/apps/pre-processing-service/app/test/test_match_service.py @@ -23,9 +23,6 @@ def test_match_success(): ] body = { - "job_id": 1, - "schedule_id": 1, - "schedule_his_id": 1, "keyword": "반지", "search_results": sample_search_results, } @@ -35,7 +32,6 @@ def test_match_success(): assert response.status_code == 200 data = response.json() - assert data["job_id"] == body["job_id"] assert data["keyword"] == body["keyword"] assert data["status"] == "success" assert isinstance(data["matched_products"], list) @@ -51,9 +47,6 @@ def test_match_success(): def test_match_no_results(): """검색 결과가 없는 경우""" body = { - "job_id": 2, - "schedule_id": 2, - "schedule_his_id": 2, "keyword": "반지", "search_results": [], } @@ -80,9 +73,6 @@ def test_match_no_matches(): ] body = { - "job_id": 3, - "schedule_id": 3, - "schedule_his_id": 3, "keyword": "반지", "search_results": sample_search_results, } diff --git a/apps/pre-processing-service/app/test/test_sadagu_crawl.py b/apps/pre-processing-service/app/test/test_sadagu_crawl.py index 6c6ad84a..b419b5c6 100644 --- a/apps/pre-processing-service/app/test/test_sadagu_crawl.py +++ b/apps/pre-processing-service/app/test/test_sadagu_crawl.py @@ -7,9 +7,6 @@ def test_crawl_success(): body = { - "job_id": 1, # 문자열 -> 숫자로 수정 - "schedule_id": 1, # 문자열 -> 숫자로 수정 - "schedule_his_id": 1, "tag": "detail", "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=886788894790", 
"use_selenium": False, @@ -21,8 +18,6 @@ def test_crawl_success(): assert response.status_code == 200 data = response.json() - assert data["job_id"] == body["job_id"] - assert data["schedule_id"] == body["schedule_id"] assert data["product_url"] == body["product_url"] assert "product_detail" in data @@ -39,7 +34,7 @@ def test_crawl_success(): # "include_images": False, # } # -# response = client.post("/products/crawl", json=body) +# response = client.post("/products/crawlers", json=body) # print(f"Response: {response.json()}") # # assert response.status_code == 200 @@ -62,7 +57,7 @@ def test_crawl_success(): # "include_images": False, # } # -# response = client.post("/products/crawl", json=body) +# response = client.post("/products/crawlers", json=body) # print(f"Response: {response.json()}") # # assert response.status_code in (400, 422, 500) @@ -79,7 +74,7 @@ def test_crawl_success(): # "include_images": True, # } # -# response = client.post("/products/crawl", json=body) +# response = client.post("/products/crawlers", json=body) # print(f"Response: {response.json()}") # # assert response.status_code == 200 diff --git a/apps/pre-processing-service/app/test/test_search_service.py b/apps/pre-processing-service/app/test/test_search_service.py index fc64c9cd..d5d3a618 100644 --- a/apps/pre-processing-service/app/test/test_search_service.py +++ b/apps/pre-processing-service/app/test/test_search_service.py @@ -7,14 +7,13 @@ def test_search_success(): """상품 검색 성공 테스트""" - body = {"job_id": 1, "schedule_id": 1, "schedule_his_id": 1, "keyword": "반지"} + body = {"keyword": "반지"} response = client.post("/products/search", json=body) print(f"Search Response: {response.json()}") assert response.status_code == 200 data = response.json() - assert data["job_id"] == body["job_id"] assert data["keyword"] == body["keyword"] assert data["status"] == "success" assert isinstance(data["search_results"], list) @@ -22,7 +21,7 @@ def test_search_success(): def test_search_empty_keyword(): 
"""빈 키워드 검색 테스트""" - body = {"job_id": 2, "schedule_id": 2, "schedule_his_id": 2, "keyword": ""} + body = {"keyword": ""} response = client.post("/products/search", json=body) print(f"Empty keyword response: {response.json()}") @@ -36,9 +35,6 @@ def test_search_empty_keyword(): def test_search_nonexistent_keyword(): """존재하지 않는 키워드 검색""" body = { - "job_id": 3, - "schedule_id": 3, - "schedule_his_id": 3, "keyword": "zxcvbnmasdfghjklqwertyuiop123456789", } diff --git a/apps/pre-processing-service/app/test/test_similarity_service.py b/apps/pre-processing-service/app/test/test_similarity_service.py index cb84d3c3..5eeba78d 100644 --- a/apps/pre-processing-service/app/test/test_similarity_service.py +++ b/apps/pre-processing-service/app/test/test_similarity_service.py @@ -29,9 +29,6 @@ def test_similarity_with_matched_products(): ] body = { - "job_id": 1, - "schedule_id": 1, - "schedule_his_id": 1, "keyword": "반지", "matched_products": matched_products, } @@ -41,7 +38,6 @@ def test_similarity_with_matched_products(): assert response.status_code == 200 data = response.json() - assert data["job_id"] == body["job_id"] assert data["keyword"] == body["keyword"] assert data["status"] == "success" @@ -65,9 +61,6 @@ def test_similarity_fallback_to_search_results(): ] body = { - "job_id": 2, - "schedule_id": 2, - "schedule_his_id": 2, "keyword": "반지", "matched_products": [], # 매칭된 상품 없음 "search_results": search_results, # 폴백용 @@ -100,9 +93,6 @@ def test_similarity_single_candidate(): ] body = { - "job_id": 3, - "schedule_id": 3, - "schedule_his_id": 3, "keyword": "반지", "matched_products": single_product, } @@ -122,9 +112,6 @@ def test_similarity_single_candidate(): def test_similarity_no_candidates(): """후보가 없는 경우""" body = { - "job_id": 4, - "schedule_id": 4, - "schedule_his_id": 4, "keyword": "반지", "matched_products": [], "search_results": [], diff --git a/apps/pre-processing-service/app/utils/crawling_util.py b/apps/pre-processing-service/app/utils/crawling_util.py index 
8ec47518..ca9d0405 100644 --- a/apps/pre-processing-service/app/utils/crawling_util.py +++ b/apps/pre-processing-service/app/utils/crawling_util.py @@ -1,59 +1,70 @@ from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.support.ui import WebDriverWait +from typing import Optional class CrawlingUtil: + """ + 공통 Selenium WebDriver 유틸리티 + 블로그 포스팅과 상품 크롤링 모두 지원 + """ - def __init__(self): - self.options = self._get_chrome_options() - self.driver = None - - def _get_chrome_options(self): + def __init__(self, headless: bool = False, for_blog_posting: bool = False): """ - 크롬 옵션 설정 - 1. 헤드리스 모드 비활성화 (네이버 탐지 우회) - 2. 샌드박스 비활성화 - 3. GPU 비활성화 - 4. 완전한 사용자 에이전트 설정 - 5. 자동화 탐지 우회 설정 + :param headless: 헤드리스 모드 사용 여부 + :param for_blog_posting: 블로그 포스팅용 설정 사용 여부 """ + self.headless = headless + self.for_blog_posting = for_blog_posting + self.options = self._get_chrome_options() + self.driver = None + def _get_chrome_options(self) -> Options: + """크롬 옵션 설정""" options = Options() - options.add_argument( - "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36" - ) - # options.add_argument('--headless') 백그라운드 실행시 주석 해제 + # 기본 설정 options.add_argument("--no-sandbox") options.add_argument("--disable-dev-shm-usage") options.add_argument("--disable-gpu") options.add_argument("--disable-extensions") - options.add_experimental_option("excludeSwitches", ["enable-automation"]) - options.add_experimental_option("useAutomationExtension", False) - options.add_argument("--disable-blink-features=AutomationControlled") - return options + # 헤드리스 모드 설정 + if self.headless: + options.add_argument("--headless") + options.add_argument("--window-size=1920,1080") - def get_driver(self): - """ - 셀레니움 웹 드라이버 반환 - :return: 셀레니움 웹 드라이버 - """ + # 블로그 포스팅용 특별 설정 (네이버 탐지 우회) + if self.for_blog_posting: + options.add_argument( + "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36" + ) + options.add_experimental_option("excludeSwitches", ["enable-automation"]) + options.add_experimental_option("useAutomationExtension", False) + options.add_argument("--disable-blink-features=AutomationControlled") + else: + # 일반 크롤링용 설정 + options.add_argument( + "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" + ) + return options + + def get_driver(self) -> webdriver.Chrome: + """셀레니움 웹 드라이버 반환""" if self.driver is None: self.driver = webdriver.Chrome(options=self.options) - return self.driver - def get_wait(self, timeout: int = 15): - """ - WebDriverWait 객체 반환 - :param timeout: 대기 시간 (초) - :return: WebDriverWait 객체 - """ - + def get_wait(self, timeout: int = 15) -> WebDriverWait: + """WebDriverWait 객체 반환""" if self.driver is None: self.get_driver() - return WebDriverWait(self.driver, timeout) + + def close(self): + """드라이버 종료""" + if self.driver: + self.driver.quit() + self.driver = None \ No newline at end of file From a3f4bb7f4f0b7e0973e6d4abe3e0b1063b7f7037 Mon Sep 17 00:00:00 2001 From: Jihu Kim Date: Tue, 16 Sep 2025 10:34:26 +0900 Subject: [PATCH 21/57] =?UTF-8?q?=EB=B8=94=EB=A1=9C=EA=B7=B8=20=EC=9E=90?= =?UTF-8?q?=EB=8F=99=ED=99=94=20=EB=B0=B0=EC=B9=98=20Job=20=EB=B0=8F=20Tas?= =?UTF-8?q?klet=20=EC=84=B8=ED=8C=85=20(#91)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: health관련 패키지 네이밍 및 파일 네이밍 변경 * feat(config): @ConfigurationProperties로 타입-안전한 설정 관리 도입 기존의 @Value나 하드코딩 방식은 오타에 취약하고, 설정값 누락 시 런타임 오류를 유발할 수 있는 타입 불안전성 문제가 있었습니다. 이를 해결하기 위해 @ConfigurationProperties를 사용하는 `FastApiProperties` 클래스를 도입하여 FastAPI 연동 설정을 중앙화하고, 애플리케이션 시작 시점에 설정값의 타입과 유효성을 검증하도록 개선했습니다. 이를 통해 잠재적인 런타임 장애를 원천 차단하고, 코드의 안정성과 유지보수성을 크게 향상시켰습니다. 
* refactor: Code Formatting * feature: 블로그 자동화 배치 Job 및 Tasklet 구현 콘텐츠 자동화 워크플로우의 핵심 실행 로직인 Spring Batch Job과 그를 구성하는 7개의 Tasklet을 구현합니다. ### 주요 구현 내용: 1. **`BlogAutomationJobConfig.java`**: - 워크플로우의 전체 실행 흐름을 담당하는 두 개의 Job(`productSelectionJob`, `contentPublishingJob`)과 각 Job을 구성하는 Step들을 정의했습니다. - '상품 선정/수집'과 '콘텐츠 생성/발행'이라는 역할을 명확히 분리하여 Job의 재사용성과 독립성을 확보했습니다. 2. **7개의 Tasklet 구현**: - `ExtractTrendKeywordTasklet`: 트렌드 키워드 추출 - `SearchProductsFromMallTasklet`: 키워드로 쇼핑몰 상품 목록 검색 - `MatchProductWithKeywordTasklet`: 키워드와 상품명 매칭 - `FindSimilarProductsTasklet`: 매칭된 상품의 유사 상품 탐색 - `CrawlSelectedProductTasklet`: 최종 선택된 상품 상세 정보 크롤링 - `GenerateBlogContentTasklet`: 수집된 정보로 AI 블로그 원고 생성 - `PublishBlogPostTasklet`: 완성된 원고를 블로그에 발행 3. **Step 간 데이터 전달**: - 각 Tasklet은 `JobExecutionContext`를 통해 다음 단계로 필요한 데이터(추출된 키워드, 선택된 상품 정보, 생성된 콘텐츠 등)를 전달하도록 구현되었습니다. * chore: fastapi와 상호작용을 담당하는 external 패키지 구성 * chore: task 간에 결합도를 줄이기 위한 인터페이스 구성 - 특정 step에서 만든 데이터를 다음 step에서 사용하는 경우가 있음(`JobExecutionContext`를 통해 데이터를 주고 받음) - `JobContextKeys`라는 인터페이스를 정의해 다른 task의 내부 변수를 직접 참조하지 않도록 함 * refactor: FastApiAdapter 및 JobContextKeys 적용 * refactor: schedule 패키지 domain 밖으로 분리 * refactor: AOP를 이용한 Tasklet 실행 로깅 중앙화 각 Tasklet 클래스 내부에 흩어져 있던 시작/종료 로그는 코드 중복을 유발하고, 비즈니스 로직의 가독성을 저해하는 문제가 있었습니다. 이 문제를 해결하기 위해, AOP를 사용하여 모든 Tasklet의 실행을 자동으로 로깅하는 `LoggingAspect`를 확장 구현합니다. ### 주요 변경 사항: - `LoggingAspect.java`에 `batch.tasklet` 패키지를 대상으로 하는 새로운 Pointcut(`taskletMethods`)을 추가했습니다. - `@Around` 어드바이스를 통해 모든 Tasklet의 실행 시작, 종료, 그리고 소요 시간을 자동으로 기록하도록 구현했습니다. ### 기대 효과: - **코드 중복 제거**: 각 Tasklet 클래스에 수동으로 작성했던 시작/종료 로그를 제거하여, 비즈니스 로직의 가독성을 크게 향상시킵니다. - **관심사의 분리(SoC)**: 핵심 로직과 로깅이라는 부가 기능 로직을 완벽하게 분리했습니다. 
* refactor: Code Formating --- .../icebang/batch/common/JobContextKeys.java | 15 +++ .../batch/job/BlogAutomationJobConfig.java | 115 ++++++++++++++++++ .../batch/job/BlogContentJobConfig.java | 51 -------- .../tasklet/ContentGenerationTasklet.java | 49 -------- .../tasklet/CrawlSelectedProductTasklet.java | 60 +++++++++ .../tasklet/ExtractTrendKeywordTasklet.java | 51 ++++++++ .../tasklet/FindSimilarProductsTasklet.java | 60 +++++++++ .../tasklet/GenerateBlogContentTasklet.java | 62 ++++++++++ .../tasklet/KeywordExtractionTasklet.java | 47 ------- .../MatchProductWithKeywordTasklet.java | 57 +++++++++ .../batch/tasklet/PublishBlogPostTasklet.java | 68 +++++++++++ .../SearchProductsFromMallTasklet.java | 58 +++++++++ .../fastapi/adapter/FastApiAdapter.java | 106 ++++++++++++++++ .../external/fastapi/dto/FastApiDto.java | 103 ++++++++++++++++ .../global/aop/logging/LoggingAspect.java | 14 +++ .../schedule/mapper/ScheduleMapper.java | 4 +- .../{domain => }/schedule/model/Schedule.java | 2 +- .../schedule/runner/SchedulerInitializer.java | 8 +- .../service/DynamicSchedulerService.java | 4 +- .../mybatis/mapper/ScheduleMapper.xml | 8 +- 20 files changed, 782 insertions(+), 160 deletions(-) create mode 100644 apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java create mode 100644 
apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java create mode 100644 apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java rename apps/user-service/src/main/java/site/icebang/{domain => }/schedule/mapper/ScheduleMapper.java (63%) rename apps/user-service/src/main/java/site/icebang/{domain => }/schedule/model/Schedule.java (84%) rename apps/user-service/src/main/java/site/icebang/{domain => }/schedule/runner/SchedulerInitializer.java (78%) rename apps/user-service/src/main/java/site/icebang/{domain => }/schedule/service/DynamicSchedulerService.java (95%) diff --git a/apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java b/apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java new file mode 100644 index 00000000..d28b7bd0 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java @@ -0,0 +1,15 @@ +package site.icebang.batch.common; + +/** + * Spring Batch의 JobExecutionContext에서 Step 간 데이터 공유를 위해 사용되는 Key들을 상수로 정의하는 인터페이스. 모든 Tasklet은 이 + * 인터페이스를 참조하여 데이터의 일관성을 유지합니다. 
+ */ +public interface JobContextKeys { + + String EXTRACTED_KEYWORD = "extractedKeyword"; + String SEARCHED_PRODUCTS = "searchedProducts"; + String MATCHED_PRODUCTS = "matchedProducts"; + String SELECTED_PRODUCT = "selectedProduct"; + String CRAWLED_PRODUCT_DETAIL = "crawledProductDetail"; + String GENERATED_CONTENT = "generatedContent"; +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java b/apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java new file mode 100644 index 00000000..d0c934b9 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java @@ -0,0 +1,115 @@ +package site.icebang.batch.job; // 패키지 경로 수정 + +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.transaction.PlatformTransactionManager; + +import lombok.RequiredArgsConstructor; + +import site.icebang.batch.tasklet.*; + +/** [배치 시스템 구현] 트렌드 기반 블로그 자동화 워크플로우를 구성하는 Job들을 정의합니다. 
*/ +@Configuration +@RequiredArgsConstructor +public class BlogAutomationJobConfig { + + // --- Tasklets --- + private final ExtractTrendKeywordTasklet extractTrendKeywordTask; + private final SearchProductsFromMallTasklet searchProductsFromMallTask; + private final MatchProductWithKeywordTasklet matchProductWithKeywordTask; + private final FindSimilarProductsTasklet findSimilarProductsTask; + private final CrawlSelectedProductTasklet crawlSelectedProductTask; + private final GenerateBlogContentTasklet generateBlogContentTask; + private final PublishBlogPostTasklet publishBlogPostTask; + + /** Job 1: 상품 선정 및 정보 수집 키워드 추출부터 최종 상품 정보 크롤링까지의 과정을 책임집니다. */ + @Bean + public Job productSelectionJob( + JobRepository jobRepository, + Step extractTrendKeywordStep, + Step searchProductsFromMallStep, + Step matchProductWithKeywordStep, + Step findSimilarProductsStep, + Step crawlSelectedProductStep) { + return new JobBuilder("productSelectionJob", jobRepository) + .start(extractTrendKeywordStep) + .next(searchProductsFromMallStep) + .next(matchProductWithKeywordStep) + .next(findSimilarProductsStep) + .next(crawlSelectedProductStep) + .build(); + } + + /** Job 2: 콘텐츠 생성 및 발행 수집된 상품 정보로 블로그 콘텐츠를 생성하고 발행합니다. 
*/ + @Bean + public Job contentPublishingJob( + JobRepository jobRepository, Step generateBlogContentStep, Step publishBlogPostStep) { + return new JobBuilder("contentPublishingJob", jobRepository) + .start(generateBlogContentStep) + .next(publishBlogPostStep) + .build(); + } + + // --- Steps for productSelectionJob --- + @Bean + public Step extractTrendKeywordStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("extractTrendKeywordStep", jobRepository) + .tasklet(extractTrendKeywordTask, transactionManager) + .build(); + } + + @Bean + public Step searchProductsFromMallStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("searchProductsFromMallStep", jobRepository) + .tasklet(searchProductsFromMallTask, transactionManager) + .build(); + } + + @Bean + public Step matchProductWithKeywordStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("matchProductWithKeywordStep", jobRepository) + .tasklet(matchProductWithKeywordTask, transactionManager) + .build(); + } + + @Bean + public Step findSimilarProductsStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("findSimilarProductsStep", jobRepository) + .tasklet(findSimilarProductsTask, transactionManager) + .build(); + } + + @Bean + public Step crawlSelectedProductStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("crawlSelectedProductStep", jobRepository) + .tasklet(crawlSelectedProductTask, transactionManager) + .build(); + } + + // --- Steps for contentPublishingJob --- + @Bean + public Step generateBlogContentStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("generateBlogContentStep", jobRepository) + .tasklet(generateBlogContentTask, transactionManager) + .build(); + } + + 
@Bean + public Step publishBlogPostStep( + JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("publishBlogPostStep", jobRepository) + .tasklet(publishBlogPostTask, transactionManager) + .build(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java b/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java deleted file mode 100644 index 5e85fe9f..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/job/BlogContentJobConfig.java +++ /dev/null @@ -1,51 +0,0 @@ -package site.icebang.batch.job; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.job.builder.JobBuilder; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.transaction.PlatformTransactionManager; - -import lombok.RequiredArgsConstructor; - -import site.icebang.batch.tasklet.ContentGenerationTasklet; -import site.icebang.batch.tasklet.KeywordExtractionTasklet; - -@Configuration -@RequiredArgsConstructor -public class BlogContentJobConfig { - - // 변경점 1: Factory 대신 실제 Tasklet만 필드로 주입받습니다. - private final KeywordExtractionTasklet keywordExtractionTasklet; - private final ContentGenerationTasklet contentGenerationTasklet; - - @Bean - public Job blogContentJob( - JobRepository jobRepository, Step keywordExtractionStep, Step contentGenerationStep) { - return new JobBuilder("blogContentJob", jobRepository) // 변경점 2: JobBuilder를 직접 생성합니다. 
- .start(keywordExtractionStep) - .next(contentGenerationStep) - .build(); - } - - @Bean - public Step keywordExtractionStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("keywordExtractionStep", jobRepository) // 변경점 3: StepBuilder를 직접 생성합니다. - .tasklet( - keywordExtractionTasklet, - transactionManager) // 변경점 4: tasklet에 transactionManager를 함께 전달합니다. - .build(); - } - - @Bean - public Step contentGenerationStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("contentGenerationStep", jobRepository) - .tasklet(contentGenerationTasklet, transactionManager) - .build(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java deleted file mode 100644 index a6ef4505..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ContentGenerationTasklet.java +++ /dev/null @@ -1,49 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@Component -@RequiredArgsConstructor -public class ContentGenerationTasklet implements Tasklet { - - // private final ContentService contentService; // 비즈니스 로직을 담은 서비스 - // private final FastApiClient fastApiClient; // FastAPI 통신을 위한 클라이언트 - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - log.info(">>>> [Step 2] ContentGenerationTasklet executed."); - - 
// --- 핵심: JobExecutionContext에서 이전 Step의 결과물 가져오기 --- - ExecutionContext jobExecutionContext = - chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - - // KeywordExtractionTasklet이 저장한 "extractedKeywordIds" Key로 데이터 조회 - List keywordIds = (List) jobExecutionContext.get("extractedKeywordIds"); - - if (keywordIds == null || keywordIds.isEmpty()) { - log.warn(">>>> No keyword IDs found from previous step. Skipping content generation."); - return RepeatStatus.FINISHED; - } - - log.info(">>>> Received Keyword IDs for content generation: {}", keywordIds); - - // TODO: 1. 전달받은 키워드 ID 목록으로 DB에서 상세 정보 조회 - // TODO: 2. 각 키워드/상품 정보에 대해 외부 AI 서비스(FastAPI/LangChain)를 호출하여 콘텐츠 생성을 요청 - // TODO: 3. 생성된 콘텐츠를 DB에 저장 - - log.info(">>>> [Step 2] ContentGenerationTasklet finished."); - return RepeatStatus.FINISHED; - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java new file mode 100644 index 00000000..6a182c37 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java @@ -0,0 +1,60 @@ +package site.icebang.batch.tasklet; + +import java.util.Map; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguCrawl; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguCrawl; + +@Slf4j 
+@Component +@RequiredArgsConstructor +public class CrawlSelectedProductTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 5] 최종 상품 크롤링 Tasklet 실행 시작"); + + ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); + Map selectedProduct = + (Map) jobExecutionContext.get(JobContextKeys.SELECTED_PRODUCT); + + if (selectedProduct == null || !selectedProduct.containsKey("link")) { + throw new RuntimeException("크롤링할 상품 URL이 없습니다."); + } + String productUrl = (String) selectedProduct.get("link"); + + RequestSsadaguCrawl request = new RequestSsadaguCrawl(1, 1, null, "detail", productUrl); + ResponseSsadaguCrawl response = fastApiAdapter.requestProductCrawl(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI 상품 크롤링에 실패했습니다."); + } + + Map productDetail = response.productDetail(); + log.info(">>>> FastAPI로부터 크롤링된 상품 상세 정보 획득"); + + jobExecutionContext.put(JobContextKeys.CRAWLED_PRODUCT_DETAIL, productDetail); + + // log.info(">>>> [Step 5] 최종 상품 크롤링 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java new file mode 100644 index 00000000..a35bebf9 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java @@ -0,0 +1,51 @@ +package site.icebang.batch.tasklet; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import 
org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestNaverSearch; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseNaverSearch; + +@Slf4j +@Component +@RequiredArgsConstructor +public class ExtractTrendKeywordTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 1] 키워드 추출 Tasklet 실행 시작"); + + RequestNaverSearch request = + new RequestNaverSearch(1, 1, null, "naver", "50000000", null, null); + ResponseNaverSearch response = fastApiAdapter.requestNaverKeywordSearch(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI로부터 키워드를 추출하는 데 실패했습니다."); + } + String extractedKeyword = response.keyword(); + log.info(">>>> FastAPI로부터 추출된 키워드: {}", extractedKeyword); + + ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); + // 다른 클래스의 상수를 직접 참조하는 대신 공용 인터페이스의 키를 사용 + jobExecutionContext.put(JobContextKeys.EXTRACTED_KEYWORD, extractedKeyword); + + // log.info(">>>> [Step 1] 키워드 추출 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java new 
file mode 100644 index 00000000..316641e1 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java @@ -0,0 +1,60 @@ +package site.icebang.batch.tasklet; + +import java.util.List; +import java.util.Map; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguSimilarity; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguSimilarity; + +@Slf4j +@Component +@RequiredArgsConstructor +public class FindSimilarProductsTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 4] 상품 유사도 분석 Tasklet 실행 시작"); + + ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); + String keyword = (String) jobExecutionContext.get(JobContextKeys.EXTRACTED_KEYWORD); + List> matchedProducts = + (List>) jobExecutionContext.get(JobContextKeys.MATCHED_PRODUCTS); + List> searchResults = + (List>) jobExecutionContext.get(JobContextKeys.SEARCHED_PRODUCTS); + + RequestSsadaguSimilarity request = + new RequestSsadaguSimilarity(1, 1, null, keyword, matchedProducts, searchResults); + ResponseSsadaguSimilarity response = fastApiAdapter.requestProductSimilarity(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI 상품 유사도 분석에 실패했습니다."); + } + + Map 
selectedProduct = response.selectedProduct(); + log.info(">>>> FastAPI로부터 최종 선택된 상품: {}", selectedProduct.get("title")); + + jobExecutionContext.put(JobContextKeys.SELECTED_PRODUCT, selectedProduct); + + // log.info(">>>> [Step 4] 상품 유사도 분석 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java new file mode 100644 index 00000000..ecf44cbb --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java @@ -0,0 +1,62 @@ +package site.icebang.batch.tasklet; + +import java.util.List; +import java.util.Map; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestBlogCreate; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseBlogCreate; + +@Slf4j +@Component +@RequiredArgsConstructor +public class GenerateBlogContentTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 6] 블로그 콘텐츠 생성 Tasklet 실행 시작"); + + ExecutionContext 
jobExecutionContext = getJobExecutionContext(chunkContext); + Map productDetail = + (Map) jobExecutionContext.get(JobContextKeys.CRAWLED_PRODUCT_DETAIL); + + // TODO: productDetail을 기반으로 LLM에 전달할 프롬프트 생성 + RequestBlogCreate request = new RequestBlogCreate(1, 1, null); + ResponseBlogCreate response = fastApiAdapter.requestBlogCreation(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI 블로그 콘텐츠 생성에 실패했습니다."); + } + + // TODO: 실제 생성된 콘텐츠를 response로부터 받아와야 함 (현재는 더미 데이터) + Map generatedContent = + Map.of( + "title", "엄청난 상품을 소개합니다! " + productDetail.get("title"), + "content", "이 상품은 정말... 좋습니다. 상세 정보: " + productDetail.toString(), + "tags", List.of("상품리뷰", "최고")); + log.info(">>>> FastAPI로부터 블로그 콘텐츠 생성 완료"); + + jobExecutionContext.put(JobContextKeys.GENERATED_CONTENT, generatedContent); + + // log.info(">>>> [Step 6] 블로그 콘텐츠 생성 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java deleted file mode 100644 index ebc27117..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/KeywordExtractionTasklet.java +++ /dev/null @@ -1,47 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - 
-@Slf4j -@Component -@RequiredArgsConstructor -public class KeywordExtractionTasklet implements Tasklet { - - // private final TrendKeywordService trendKeywordService; // 비즈니스 로직을 담은 서비스 - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - log.info(">>>> [Step 1] KeywordExtractionTasklet executed."); - - // TODO: 1. DB에서 카테고리 정보 조회 - // TODO: 2. 외부 API 또는 내부 로직을 통해 트렌드 키워드 추출 - // TODO: 3. 추출된 키워드를 DB에 저장 - - // --- 핵심: 다음 Step에 전달할 데이터 생성 --- - // 예시: 새로 생성된 키워드 ID 목록을 가져왔다고 가정 - List extractedKeywordIds = List.of(1L, 2L, 3L); // 실제로는 DB 저장 후 반환된 ID 목록 - log.info(">>>> Extracted Keyword IDs: {}", extractedKeywordIds); - - // --- 핵심: JobExecutionContext에 결과물 저장 --- - // JobExecution 전체에서 공유되는 컨텍스트를 가져옵니다. - ExecutionContext jobExecutionContext = - chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - - // "extractedKeywordIds" 라는 Key로 데이터 저장 - jobExecutionContext.put("extractedKeywordIds", extractedKeywordIds); - - log.info(">>>> [Step 1] KeywordExtractionTasklet finished."); - return RepeatStatus.FINISHED; - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java new file mode 100644 index 00000000..bdb15200 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java @@ -0,0 +1,57 @@ +package site.icebang.batch.tasklet; + +import java.util.List; +import java.util.Map; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; 
+import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguMatch; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguMatch; + +@Slf4j +@Component +@RequiredArgsConstructor +public class MatchProductWithKeywordTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 3] 상품 매칭 Tasklet 실행 시작"); + + ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); + String keyword = (String) jobExecutionContext.get(JobContextKeys.EXTRACTED_KEYWORD); + List> searchResults = + (List>) jobExecutionContext.get(JobContextKeys.SEARCHED_PRODUCTS); + + RequestSsadaguMatch request = new RequestSsadaguMatch(1, 1, null, keyword, searchResults); + ResponseSsadaguMatch response = fastApiAdapter.requestProductMatch(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI 상품 매칭에 실패했습니다."); + } + + List> matchedProducts = response.matchedProducts(); + log.info(">>>> FastAPI로부터 매칭된 상품 {}개", matchedProducts.size()); + + jobExecutionContext.put(JobContextKeys.MATCHED_PRODUCTS, matchedProducts); + + log.info(">>>> [Step 3] 상품 매칭 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java new file mode 100644 index 00000000..e1b75a18 --- /dev/null +++ 
b/apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java @@ -0,0 +1,68 @@ +package site.icebang.batch.tasklet; + +import java.util.List; +import java.util.Map; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestBlogPublish; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseBlogPublish; + +@Slf4j +@Component +@RequiredArgsConstructor +public class PublishBlogPostTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 7] 블로그 발행 Tasklet 실행 시작"); + + ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); + Map content = + (Map) jobExecutionContext.get(JobContextKeys.GENERATED_CONTENT); + + // TODO: UserConfig 등에서 실제 블로그 정보(ID, PW)를 가져와야 함 + String blogId = "my_blog_id"; + String blogPw = "my_blog_password"; + + RequestBlogPublish request = + new RequestBlogPublish( + 1, + 1, + null, + "naver", + blogId, + blogPw, + (String) content.get("title"), + (String) content.get("content"), + (List) content.get("tags")); + + ResponseBlogPublish response = fastApiAdapter.requestBlogPost(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI 블로그 발행에 실패했습니다."); + } + + log.info(">>>> FastAPI를 통해 블로그 발행 성공: {}", response.metadata()); + + // log.info(">>>> 
[Step 7] 블로그 발행 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java new file mode 100644 index 00000000..3480f391 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java @@ -0,0 +1,58 @@ +package site.icebang.batch.tasklet; + +import java.util.List; +import java.util.Map; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.batch.common.JobContextKeys; +import site.icebang.external.fastapi.adapter.FastApiAdapter; +import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguSearch; +import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguSearch; + +@Slf4j +@Component +@RequiredArgsConstructor +public class SearchProductsFromMallTasklet implements Tasklet { + + private final FastApiAdapter fastApiAdapter; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + // log.info(">>>> [Step 2] 상품 검색 Tasklet 실행 시작"); + + ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); + String keyword = (String) jobExecutionContext.get(JobContextKeys.EXTRACTED_KEYWORD); + + if (keyword == null) { + throw new RuntimeException("이전 Step에서 키워드를 전달받지 
못했습니다."); + } + + RequestSsadaguSearch request = new RequestSsadaguSearch(1, 1, null, keyword); + ResponseSsadaguSearch response = fastApiAdapter.requestSsadaguProductSearch(request); + + if (response == null || !"200".equals(response.status())) { + throw new RuntimeException("FastAPI 상품 검색에 실패했습니다."); + } + List> searchResults = response.searchResults(); + log.info(">>>> FastAPI로부터 검색된 상품 {}개", searchResults.size()); + + jobExecutionContext.put(JobContextKeys.SEARCHED_PRODUCTS, searchResults); + + // log.info(">>>> [Step 2] 상품 검색 Tasklet 실행 완료"); + return RepeatStatus.FINISHED; + } + + private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { + return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java b/apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java new file mode 100644 index 00000000..e4e81a73 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java @@ -0,0 +1,106 @@ +package site.icebang.external.fastapi.adapter; + +import org.springframework.stereotype.Component; +import org.springframework.web.client.RestClientException; +import org.springframework.web.client.RestTemplate; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.external.fastapi.dto.FastApiDto.*; +import site.icebang.global.config.properties.FastApiProperties; + +/** FastAPI 서버와의 통신을 전담하는 어댑터 클래스. 모든 외부 API 호출은 이 클래스를 통해 이루어집니다. */ +@Slf4j +@Component +@RequiredArgsConstructor +public class FastApiAdapter { + + private final RestTemplate restTemplate; + private final FastApiProperties properties; + + /** TASK 1: 네이버 키워드 추출을 FastAPI에 요청합니다. 
*/ + public ResponseNaverSearch requestNaverKeywordSearch(RequestNaverSearch request) { + String url = properties.getUrl() + "/keyword/search"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseNaverSearch.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI keyword search API. Error: {}", e.getMessage()); + // TODO: 비즈니스 요구사항에 맞는 예외 처리 (재시도, 기본값 반환, 특정 예외 던지기 등) + return null; + } + } + + /** TASK 2: 싸다구몰 상품 검색을 FastAPI에 요청합니다. */ + public ResponseSsadaguSearch requestSsadaguProductSearch(RequestSsadaguSearch request) { + String url = properties.getUrl() + "/product/search"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseSsadaguSearch.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI product search API. Error: {}", e.getMessage()); + return null; + } + } + + /** TASK 3: 상품 매칭을 FastAPI에 요청합니다. */ + public ResponseSsadaguMatch requestProductMatch(RequestSsadaguMatch request) { + String url = properties.getUrl() + "/product/match"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseSsadaguMatch.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI product match API. Error: {}", e.getMessage()); + return null; + } + } + + /** TASK 4: 상품 유사도 분석을 FastAPI에 요청합니다. (메서드명 수정) */ + public ResponseSsadaguSimilarity requestProductSimilarity(RequestSsadaguSimilarity request) { + String url = properties.getUrl() + "/product/similarity"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseSsadaguSimilarity.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI product similarity API. Error: {}", e.getMessage()); + return null; + } + } + + /** TASK 5: 상품 상세 정보 크롤링을 FastAPI에 요청합니다. 
*/ + public ResponseSsadaguCrawl requestProductCrawl(RequestSsadaguCrawl request) { + String url = properties.getUrl() + "/product/crawl"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseSsadaguCrawl.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI product crawl API. Error: {}", e.getMessage()); + return null; + } + } + + /** TASK 6: 블로그 콘텐츠 생성을 FastAPI에 요청합니다. */ + public ResponseBlogCreate requestBlogCreation(RequestBlogCreate request) { + String url = properties.getUrl() + "/blog/rag/create"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseBlogCreate.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI blog creation API. Error: {}", e.getMessage()); + return null; + } + } + + /** TASK 7: 블로그 발행을 FastAPI에 요청합니다. */ + public ResponseBlogPublish requestBlogPost(RequestBlogPublish request) { + String url = properties.getUrl() + "/blog/publish"; + log.info("Requesting to FastAPI [POST {}]", url); + try { + return restTemplate.postForObject(url, request, ResponseBlogPublish.class); + } catch (RestClientException e) { + log.error("Failed to call FastAPI blog publish API. Error: {}", e.getMessage()); + return null; + } + } +} diff --git a/apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java b/apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java new file mode 100644 index 00000000..88ffe284 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java @@ -0,0 +1,103 @@ +package site.icebang.external.fastapi.dto; + +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** FastAPI 서버와 통신하기 위한 DTO 클래스 모음. Java의 record를 사용하여 불변 데이터 객체를 간결하게 정의합니다. */ +public final class FastApiDto { + + // --- 1. 
네이버 키워드 추출 --- + public record RequestNaverSearch( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId, + String tag, + String category, + @JsonProperty("start_date") String startDate, + @JsonProperty("end_date") String endDate) {} + + public record ResponseNaverSearch( + String status, + String category, + String keyword, + @JsonProperty("total_keyword") Map totalKeyword) {} + + // --- 2. 상품 검색 --- + public record RequestSsadaguSearch( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId, + String keyword) {} + + public record ResponseSsadaguSearch( + String status, + String keyword, + @JsonProperty("search_results") List> searchResults) {} + + // --- 3. 상품 매칭 --- + public record RequestSsadaguMatch( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId, + String keyword, + @JsonProperty("search_results") List> searchResults) {} + + public record ResponseSsadaguMatch( + String status, + String keyword, + @JsonProperty("matched_products") List> matchedProducts) {} + + // --- 4. 상품 유사도 --- + public record RequestSsadaguSimilarity( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId, + String keyword, + @JsonProperty("matched_products") List> matchedProducts, + @JsonProperty("search_results") List> searchResults) {} + + public record ResponseSsadaguSimilarity( + String status, + String keyword, + @JsonProperty("selected_product") Map selectedProduct, + String reason) {} + + // --- 5. 
상품 크롤링 --- + public record RequestSsadaguCrawl( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId, + String tag, + @JsonProperty("product_url") String productUrl) {} + + public record ResponseSsadaguCrawl( + String status, + String tag, + @JsonProperty("product_url") String productUrl, + @JsonProperty("product_detail") Map productDetail, + @JsonProperty("crawled_at") String crawledAt) {} + + // --- 6. 블로그 콘텐츠 생성 --- + public record RequestBlogCreate( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId) {} + + public record ResponseBlogCreate(String status) {} + + // --- 7. 블로그 발행 --- + public record RequestBlogPublish( + @JsonProperty("job_id") int jobId, + @JsonProperty("schedule_id") int scheduleId, + @JsonProperty("schedule_his_id") Integer scheduleHisId, + String tag, + @JsonProperty("blog_id") String blogId, + @JsonProperty("blog_pw") String blogPw, + @JsonProperty("post_title") String postTitle, + @JsonProperty("post_content") String postContent, + @JsonProperty("post_tags") List postTags) {} + + public record ResponseBlogPublish(String status, Map metadata) {} +} diff --git a/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java index 126c7d35..b1806cff 100644 --- a/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java +++ b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java @@ -22,6 +22,9 @@ public void serviceMethods() {} @Pointcut("execution(public * site.icebang..service..mapper..*(..))") public void repositoryMethods() {} + @Pointcut("execution(public * site.icebang.batch.tasklet..*(..))") + public void taskletMethods() {} + @Around("controllerMethods()") public Object logController(ProceedingJoinPoint 
joinPoint) throws Throwable { long start = System.currentTimeMillis(); @@ -51,4 +54,15 @@ public Object logRepository(ProceedingJoinPoint joinPoint) throws Throwable { log.debug("[REPOSITORY] End: {} ({}ms)", joinPoint.getSignature(), duration); return result; } + + @Around("taskletMethods()") + public Object logTasklet(ProceedingJoinPoint joinPoint) throws Throwable { + long start = System.currentTimeMillis(); + // Tasklet 이름만으로도 구분이 되므로, 클래스명 + 메서드명으로 로그를 남깁니다. + log.info(">>>> [TASKLET] Start: {}", joinPoint.getSignature().toShortString()); + Object result = joinPoint.proceed(); // 실제 Tasklet의 execute() 메서드 실행 + long duration = System.currentTimeMillis() - start; + log.info("<<<< [TASKLET] End: {} ({}ms)", joinPoint.getSignature().toShortString(), duration); + return result; + } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java b/apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java similarity index 63% rename from apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java rename to apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java index c757fc36..b1a92f1e 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java +++ b/apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java @@ -1,10 +1,10 @@ -package site.icebang.domain.schedule.mapper; +package site.icebang.schedule.mapper; import java.util.List; import org.apache.ibatis.annotations.Mapper; -import site.icebang.domain.schedule.model.Schedule; +import site.icebang.schedule.model.Schedule; @Mapper public interface ScheduleMapper { diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java similarity index 84% rename from apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java 
rename to apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java index 65c48366..ced2900c 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java +++ b/apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java @@ -1,4 +1,4 @@ -package site.icebang.domain.schedule.model; +package site.icebang.schedule.model; import lombok.Getter; import lombok.Setter; diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java similarity index 78% rename from apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java rename to apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java index 0dfb8b33..ee8580dd 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/schedule/runner/SchedulerInitializer.java +++ b/apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java @@ -1,4 +1,4 @@ -package site.icebang.domain.schedule.runner; +package site.icebang.schedule.runner; import java.util.List; @@ -9,9 +9,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import site.icebang.domain.schedule.mapper.ScheduleMapper; -import site.icebang.domain.schedule.model.Schedule; -import site.icebang.domain.schedule.service.DynamicSchedulerService; +import site.icebang.schedule.mapper.ScheduleMapper; +import site.icebang.schedule.model.Schedule; +import site.icebang.schedule.service.DynamicSchedulerService; @Slf4j @Component diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java b/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java similarity index 95% rename from apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java rename to 
apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java index 372e0e1d..b81e30eb 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/schedule/service/DynamicSchedulerService.java +++ b/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java @@ -1,4 +1,4 @@ -package site.icebang.domain.schedule.service; +package site.icebang.schedule.service; import java.time.LocalDateTime; import java.util.Map; @@ -16,7 +16,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import site.icebang.domain.schedule.model.Schedule; +import site.icebang.schedule.model.Schedule; @Slf4j @Service diff --git a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml index 3cdcc90e..f9629b8a 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml @@ -1,16 +1,16 @@ - + - SELECT id AS scheduleId, workflow_id AS workflowId, cron_expression AS cronExpression, is_active AS isActive - FROM + FROM schedule - WHERE + WHERE is_active = #{isActive} From 1acd33c8a3aff9b99bf3276ed17739f64522ba5f Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 16 Sep 2025 11:06:31 +0900 Subject: [PATCH 22/57] =?UTF-8?q?chore:=20=EC=BD=94=EB=93=9C=EC=97=90=20?= =?UTF-8?q?=EC=9E=88=EB=8A=94=20job=5Fid=20=EC=A0=84=EB=B6=80=20=EC=A0=9C?= =?UTF-8?q?=EA=B1=B0=201.=20middleware=20=EC=88=98=EC=A0=95=ED=95=B4?= =?UTF-8?q?=EC=95=BC=ED=95=A8(=EA=B2=BD=EB=AF=BC)=202.=20pytest=20all=20pa?= =?UTF-8?q?ss?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/api/endpoints/test.py | 13 ++++--------- .../app/model/schemas.py | 18 ------------------ .../app/service/crawl_service.py | 6 +++--- .../app/service/search_service.py | 8 +------- .../app/service/similarity_service.py | 7 ++----- 5 files 
changed, 10 insertions(+), 42 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py index 9e17a7c4..91977a3f 100644 --- a/apps/pre-processing-service/app/api/endpoints/test.py +++ b/apps/pre-processing-service/app/api/endpoints/test.py @@ -62,11 +62,6 @@ def with_meta(data: Mapping[str, Any], meta: Mapping[str, Any]) -> Dict[str, Any @router.get("/tester", response_model=None) async def processing_tester(): - meta = { - "job_id": 1, - "schedule_id": 1, - "schedule_his_id": 1, # ✅ 타이포 수정 - } request_dict = { "tag": "naver", "category": "50000000", @@ -74,7 +69,7 @@ async def processing_tester(): "end_date": "2025-09-02", } # 네이버 키워드 검색 - naver_request = RequestNaverSearch(**with_meta(meta, request_dict)) + naver_request = RequestNaverSearch(**with_meta(request_dict)) response_data = await keyword_search(naver_request) keyword = response_data.get("keyword") loguru.logger.info(keyword) @@ -84,21 +79,21 @@ async def processing_tester(): } # 싸다구 상품 검색 - sadagu_request = RequestSadaguSearch(**with_meta(meta, keyword)) + sadagu_request = RequestSadaguSearch(**with_meta(keyword)) search_service = SearchService() keyword_result = await search_service.search_products(sadagu_request) loguru.logger.info(keyword_result) # 싸다구 상품 매치 keyword["search_results"] = keyword_result.get("search_results") - keyword_match_request = RequestSadaguMatch(**with_meta(meta, keyword)) + keyword_match_request = RequestSadaguMatch(**with_meta(keyword)) match_service = MatchService() keyword_match_response = match_service.match_products(keyword_match_request) loguru.logger.info(keyword_match_response) # 싸다구 상품 유사도 분석 keyword["matched_products"] = keyword_match_response.get("matched_products") - keyword_similarity_request = RequestSadaguSimilarity(**with_meta(meta, keyword)) + keyword_similarity_request = RequestSadaguSimilarity(**with_meta(keyword)) # similarity_service = SimilarityService() # 
keyword_similarity_response = similarity_service.select_product_by_similarity( # keyword_similarity_request diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index 52775416..9581ad0f 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -5,29 +5,11 @@ # 기본 요청 class RequestBase(BaseModel): - # job_id: int = Field( - # ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" - # ) - # schedule_id: int = Field( - # ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" - # ) - # schedule_his_id: Optional[int] = Field( - # None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" - # ) pass # 기본 응답 class ResponseBase(BaseModel): - # job_id: int = Field( - # ..., title="작업 ID", description="현재 실행 중인 작업의 고유 식별자" - # ) - # schedule_id: int = Field( - # ..., title="스케줄 ID", description="예약된 스케줄의 고유 식별자" - # ) - # schedule_his_id: Optional[int] = Field( - # None, title="스케줄 히스토리 ID", description="스케줄 실행 이력의 고유 식별자" - # ) status: str = Field(..., title="상태", description="요청 처리 상태") pass diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 8543658e..7d6a8d1a 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -18,7 +18,7 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: try: logger.info( - f"상품 상세 크롤링 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, product_url={request.product_url}" + f"상품 상세 크롤링 서비스 시작: product_url={request.product_url}" ) # 상세 정보 크롤링 실행 @@ -45,13 +45,13 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: } logger.info( - f"상품 상세 크롤링 서비스 완료: job_id={request.job_id}, status=success" + f"상품 상세 크롤링 서비스 완료: status=success" ) return response_data except Exception as e: logger.error( - f"크롤링 서비스 오류: 
job_id={request.job_id}, product_url={request.product_url}, error='{e}'" + f"크롤링 서비스 오류: product_url={request.product_url}, error='{e}'" ) raise InvalidItemDataException() finally: diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index f7255e61..ec4ca59c 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -30,9 +30,6 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: if not search_results: logger.warning(f"검색 결과가 없습니다: keyword='{keyword}'") return { - # "job_id": request.job_id, - # "schedule_id": request.schedule_id, - # "schedule_his_id": request.schedule_his_id, "keyword": keyword, "search_results": [], "status": "success", @@ -91,9 +88,6 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: ) return { - # "job_id": request.job_id, - # "schedule_id": request.schedule_id, - # "schedule_his_id": request.schedule_his_id, "keyword": keyword, "search_results": enriched_results, "status": "success", @@ -101,7 +95,7 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: except Exception as e: logger.error( - f"검색 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + f"검색 서비스 오류: keyword='{keyword}', error='{e}'" ) raise InvalidItemDataException() diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 9015bd94..0241fca3 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -17,7 +17,7 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict fallback_products = request.search_results or [] logger.info( - f"유사도 분석 서비스 시작: job_id={request.job_id}, keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, 
fallback_count={len(fallback_products)}" + f"유사도 분석 서비스 시작: keyword='{keyword}', matched_count={len(candidates) if candidates else 0}, fallback_count={len(fallback_products)}" ) # 매칭된 상품이 없으면 전체 검색 결과로 폴백 @@ -64,9 +64,6 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict f"단일 상품 유사도 미달: similarity={similarity:.4f} < threshold={similarity_threshold}" ) return { - "job_id": request.job_id, - "schedule_id": request.schedule_id, - "schedule_his_id": request.schedule_his_id, "keyword": keyword, "selected_product": None, "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})", @@ -160,6 +157,6 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict except Exception as e: logger.error( - f"유사도 분석 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" + f"유사도 분석 서비스 오류: keyword='{keyword}', error='{e}'" ) raise InvalidItemDataException() From 5700b777962528fa83a7cdf6a5f49dc2ed7bf90c Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 16 Sep 2025 11:13:44 +0900 Subject: [PATCH 23/57] chore: poetry run black . 
--- .../app/api/endpoints/blog.py | 6 ++++-- .../app/api/endpoints/keywords.py | 2 ++ apps/pre-processing-service/app/api/router.py | 1 + .../app/service/blog/base_blog_post_service.py | 13 ++++++++----- .../service/blog/blogger_blog_post_adapter.py | 6 ++++-- .../service/blog/blogger_blog_post_service.py | 18 +++++++++--------- .../app/service/crawl_service.py | 4 +--- .../app/service/crawlers/base_crawler.py | 6 +++--- .../app/service/crawlers/detail_crawler.py | 3 ++- .../app/service/crawlers/search_crawler.py | 3 ++- .../app/service/search_service.py | 4 +--- .../app/service/similarity_service.py | 4 +--- .../app/test/test_keyword.py | 1 + .../app/utils/crawling_util.py | 2 +- 14 files changed, 40 insertions(+), 33 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 158faf20..85da62b2 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -4,7 +4,9 @@ from ...model.schemas import * from app.service.blog.tistory_blog_post_service import TistoryBlogPostService from app.service.blog.naver_blog_post_service import NaverBlogPostService -from ...service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter # 수정된 import +from ...service.blog.blogger_blog_post_adapter import ( + BloggerBlogPostAdapter, +) # 수정된 import router = APIRouter() @@ -74,4 +76,4 @@ async def publish(request: RequestBlogPublish): "블로거 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish(status="success", metadata=result) \ No newline at end of file + return ResponseBlogPublish(status="success", metadata=result) diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index 43c0049b..a1028391 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -5,10 +5,12 @@ 
router = APIRouter() + @router.get("/") async def root(): return {"message": "keyword API"} + @router.post( "/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색" ) diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py index b180c97e..99286cf6 100644 --- a/apps/pre-processing-service/app/api/router.py +++ b/apps/pre-processing-service/app/api/router.py @@ -17,6 +17,7 @@ # 모듈 테스터를 위한 endpoint -> 추후 삭제 예정 api_router.include_router(test.router, prefix="/tests", tags=["Test"]) + @api_router.get("/ping") async def root(): return {"message": "서버 실행중입니다."} diff --git a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py index d6d6989b..f55bdba0 100644 --- a/apps/pre-processing-service/app/service/blog/base_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/base_blog_post_service.py @@ -5,6 +5,7 @@ from app.errors.BlogPostingException import * from app.errors.CrawlingException import * + class BaseBlogPostService(ABC): """ 블로그 포스팅 서비스 추상 클래스 @@ -22,7 +23,7 @@ def __init__(self, use_webdriver=True): # 블로그 포스팅용 설정으로 초기화 self.crawling_service = CrawlingUtil( headless=False, # 네이버 탐지 우회를 위해 headless 비활성화 - for_blog_posting=True + for_blog_posting=True, ) self.web_driver = self.crawling_service.get_driver() self.wait_driver = self.crawling_service.get_wait() @@ -61,7 +62,9 @@ def _get_platform_name(self) -> str: pass @abstractmethod - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """ 공통 유효성 검사 로직 :param title: 포스트 제목 @@ -100,10 +103,10 @@ def post_content(self, title: str, content: str, tags: List[str] = None) -> Dict "platform": self._get_platform_name(), "title": title, "content_length": len(content), - "tags": tags or [] + "tags": tags or [], } 
def __del__(self): """공통 리소스 정리""" - if hasattr(self, 'web_driver') and self.web_driver: - self.web_driver.quit() \ No newline at end of file + if hasattr(self, "web_driver") and self.web_driver: + self.web_driver.quit() diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py index 1daba4af..717a102e 100644 --- a/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_adapter.py @@ -57,7 +57,9 @@ def _get_platform_name(self) -> str: """플랫폼 이름 반환""" return "Blogger" - def _validate_content(self, title: str, content: str, tags: Optional[List[str]] = None) -> None: + def _validate_content( + self, title: str, content: str, tags: Optional[List[str]] = None + ) -> None: """ API 전용 유효성 검사 호출 """ @@ -79,4 +81,4 @@ def __del__(self): API 서비스이므로 웹드라이버 정리가 불필요 """ # 웹드라이버가 없으므로 정리할 것이 없음 - pass \ No newline at end of file + pass diff --git a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py index 86de82a6..8bdeb221 100644 --- a/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py +++ b/apps/pre-processing-service/app/service/blog/blogger_blog_post_service.py @@ -81,16 +81,14 @@ def authenticate_with_google_oauth(self) -> bool: except Exception as e: raise BloggerApiException("API 인증 실패", e) - def create_post_via_api(self, title: str, content: str, labels: List[str] = None) -> Dict: + def create_post_via_api( + self, title: str, content: str, labels: List[str] = None + ) -> Dict: """API를 통한 포스트 생성 (Selenium write_content와 완전히 다름)""" if not self.authenticated: self.authenticate_with_google_oauth() - post_data = { - "title": title, - "content": content, - "labels": labels or [] - } + post_data = {"title": title, "content": content, "labels": labels or []} try: result = 
( @@ -102,17 +100,19 @@ def create_post_via_api(self, title: str, content: str, labels: List[str] = None return { "blogger_post_id": result.get("id"), "published_url": result.get("url"), - "status": "published" + "status": "published", } except Exception as e: raise BlogPostPublishException( platform="Blogger", reason="API 통신 중 오류가 발생했습니다." ) from e - def validate_api_content(self, title: str, content: str, labels: List[str] = None) -> None: + def validate_api_content( + self, title: str, content: str, labels: List[str] = None + ) -> None: """API 전용 유효성 검사""" if not title or not title.strip(): raise BlogContentValidationException("title", "제목이 비어있습니다") if not content or not content.strip(): raise BlogContentValidationException("content", "내용이 비어있습니다") - # Blogger는 태그가 선택사항 \ No newline at end of file + # Blogger는 태그가 선택사항 diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 7d6a8d1a..548df05d 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -44,9 +44,7 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), } - logger.info( - f"상품 상세 크롤링 서비스 완료: status=success" - ) + logger.info(f"상품 상세 크롤링 서비스 완료: status=success") return response_data except Exception as e: diff --git a/apps/pre-processing-service/app/service/crawlers/base_crawler.py b/apps/pre-processing-service/app/service/crawlers/base_crawler.py index dc495843..27934ab5 100644 --- a/apps/pre-processing-service/app/service/crawlers/base_crawler.py +++ b/apps/pre-processing-service/app/service/crawlers/base_crawler.py @@ -42,15 +42,15 @@ def _setup_httpx(self): async def close(self): """리소스 정리""" - if self.use_selenium and hasattr(self, 'crawling_util'): + if self.use_selenium and hasattr(self, "crawling_util"): try: self.crawling_util.close() logger.info("Selenium WebDriver 
종료 완료") except Exception as e: logger.warning(f"Selenium WebDriver 종료 중 오류: {e}") - elif hasattr(self, 'client'): + elif hasattr(self, "client"): try: await self.client.aclose() logger.info("httpx 클라이언트 종료 완료") except Exception as e: - logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") \ No newline at end of file + logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") diff --git a/apps/pre-processing-service/app/service/crawlers/detail_crawler.py b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py index 83829f5a..885fd2f0 100644 --- a/apps/pre-processing-service/app/service/crawlers/detail_crawler.py +++ b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py @@ -4,6 +4,7 @@ from .search_crawler import SearchCrawler from loguru import logger + class DetailCrawler(SearchCrawler): """SearchCrawler를 확장한 상세 크롤링 클래스""" @@ -228,4 +229,4 @@ def _extract_images(self, soup: BeautifulSoup) -> list[str]: logger.debug(f"이미지 URL 추출: {src}") logger.info(f"총 {len(images)}개 이미지 URL 추출 완료") - return images \ No newline at end of file + return images diff --git a/apps/pre-processing-service/app/service/crawlers/search_crawler.py b/apps/pre-processing-service/app/service/crawlers/search_crawler.py index 41610a2d..a0d46e02 100644 --- a/apps/pre-processing-service/app/service/crawlers/search_crawler.py +++ b/apps/pre-processing-service/app/service/crawlers/search_crawler.py @@ -5,6 +5,7 @@ from bs4 import BeautifulSoup from selenium.webdriver.common.by import By + class SearchCrawler(BaseCrawler): """상품 검색 전용 크롤러""" @@ -133,4 +134,4 @@ async def close(self): await self.client.aclose() logger.info("httpx 클라이언트 종료 완료") except Exception as e: - logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") \ No newline at end of file + logger.warning(f"httpx 클라이언트 종료 중 오류: {e}") diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index ec4ca59c..a71d6a8d 100644 --- 
a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -94,9 +94,7 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: } except Exception as e: - logger.error( - f"검색 서비스 오류: keyword='{keyword}', error='{e}'" - ) + logger.error(f"검색 서비스 오류: keyword='{keyword}', error='{e}'") raise InvalidItemDataException() finally: diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index 0241fca3..c77aa8ba 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -156,7 +156,5 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict } except Exception as e: - logger.error( - f"유사도 분석 서비스 오류: keyword='{keyword}', error='{e}'" - ) + logger.error(f"유사도 분석 서비스 오류: keyword='{keyword}', error='{e}'") raise InvalidItemDataException() diff --git a/apps/pre-processing-service/app/test/test_keyword.py b/apps/pre-processing-service/app/test/test_keyword.py index 095b6607..11bd69fa 100644 --- a/apps/pre-processing-service/app/test/test_keyword.py +++ b/apps/pre-processing-service/app/test/test_keyword.py @@ -4,6 +4,7 @@ client = TestClient(app) + def test_read_root(): response = client.get("/keywords/") assert response.status_code == 200 diff --git a/apps/pre-processing-service/app/utils/crawling_util.py b/apps/pre-processing-service/app/utils/crawling_util.py index ca9d0405..315df32a 100644 --- a/apps/pre-processing-service/app/utils/crawling_util.py +++ b/apps/pre-processing-service/app/utils/crawling_util.py @@ -67,4 +67,4 @@ def close(self): """드라이버 종료""" if self.driver: self.driver.quit() - self.driver = None \ No newline at end of file + self.driver = None From 2172cde826a52a6405be03f76d12408314db00f3 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Tue, 16 Sep 2025 11:21:46 +0900 Subject: 
[PATCH 24/57] Loki e2e test support (#99) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: e2e loki test set up * fix: E2e test 클래스 수행 전 truncate --- .../main/resources/application-test-e2e.yml | 2 +- .../src/main/resources/log4j2-develop.yml | 23 +++ .../src/main/resources/log4j2-test-e2e.yml | 168 ++++++++++++++++++ .../setup/config/E2eTestConfiguration.java | 25 ++- .../e2e/setup/support/E2eTestSupport.java | 2 + 5 files changed, 218 insertions(+), 2 deletions(-) create mode 100644 apps/user-service/src/main/resources/log4j2-test-e2e.yml diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml b/apps/user-service/src/main/resources/application-test-e2e.yml index f7dceba9..3a777909 100644 --- a/apps/user-service/src/main/resources/application-test-e2e.yml +++ b/apps/user-service/src/main/resources/application-test-e2e.yml @@ -18,4 +18,4 @@ mybatis: map-underscore-to-camel-case: true logging: - config: classpath:log4j2-production.yml \ No newline at end of file + config: classpath:log4j2-test-e2e.yml \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index 69833c98..41a369bf 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -61,6 +61,29 @@ Configuration: - name: "runId" value: "${ctx:runId}" +# ExecutionDB: +# name: ExecutionDB +# class: org.apache.logging.log4j.core.appender.db.jdbc.JdbcAppender +# tableName: execution_log +# columnMappings: +# - name: log_level +# pattern: "%level" +# - name: log_message +# pattern: "%message" +# - name: trace_id +# pattern: "%X{traceId}" +# - name: execution_type +# pattern: "%X{executionType}" +# - name: source_id +# pattern: "%X{sourceId}" +# - name: run_id +# pattern: "%X{runId}" +# - name: executed_at +# pattern: "%d{yyyy-MM-dd HH:mm:ss}" +# connectionSource: +# class: 
org.apache.logging.log4j.core.appender.db.jdbc.DataSourceConnectionSource +# dataSource: "#dataSource + # 개발용 일반 로그 파일 File: - name: file-dev-appender diff --git a/apps/user-service/src/main/resources/log4j2-test-e2e.yml b/apps/user-service/src/main/resources/log4j2-test-e2e.yml new file mode 100644 index 00000000..557f426b --- /dev/null +++ b/apps/user-service/src/main/resources/log4j2-test-e2e.yml @@ -0,0 +1,168 @@ +Configuration: + status: DEBUG + name: e2e + + properties: + property: + - name: "app-name" + value: "user-service" + - name: "log-path" + value: "./logs" + - name: "charset-UTF-8" + value: "UTF-8" + # DEBUG 환경용 콘솔 패턴 - 더 간단하고 가독성 좋게 + - name: "console-layout-pattern" + value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss} [%t] %n %logger{20} - %msg%n%n " + # 파일용 패턴 + - name: "file-layout-pattern" + value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n" + # 개발 환경용 로그 파일들 + - name: "dev-log" + value: ${log-path}/develop/app.log + - name: "error-log" + value: ${log-path}/develop/error.log + + Appenders: + # 콘솔 출력 - 개발 시 주요 출력 + Console: + name: console-appender + target: SYSTEM_OUT + PatternLayout: + pattern: ${console-layout-pattern} + disableAnsi: false + + Loki: + name: loki-appender + host: localhost + port: ${sys:loki.port} + JsonLayout: + compact: true + eventEol: true + includeStacktrace: true + KeyValuePair: + - key: "app" + value: "${app-name}" + - key: "env" + value: "test-e2e" + Label: + - name: "app" + value: "${app-name}" + - name: "env" + value: "test-e2e" + - name: "traceId" + value: "${ctx:traceId}" + - name: "spanId" + value: "${ctx:spanId}" + - name: "executionType" + value: "${ctx:executionType:-application}" + - name: "sourceId" + value: "${ctx:sourceId}" + - name: "runId" + value: "${ctx:runId}" + +# ExecutionDB: +# name: ExecutionDB +# class: org.apache.logging.log4j.core.appender.db.jdbc.JdbcAppender +# tableName: execution_log +# columnMappings: +# - name: log_level +# pattern: 
"%level" +# - name: log_message +# pattern: "%message" +# - name: trace_id +# pattern: "%X{traceId}" +# - name: execution_type +# pattern: "%X{executionType}" +# - name: source_id +# pattern: "%X{sourceId}" +# - name: run_id +# pattern: "%X{runId}" +# - name: executed_at +# pattern: "%d{yyyy-MM-dd HH:mm:ss}" +# connectionSource: +# class: org.apache.logging.log4j.core.appender.db.jdbc.DataSourceConnectionSource +# dataSource: "#dataSource + + # 개발용 일반 로그 파일 + File: + - name: file-dev-appender + fileName: ${dev-log} + PatternLayout: + pattern: ${file-layout-pattern} + - name: file-error-appender + fileName: ${error-log} + PatternLayout: + pattern: ${file-layout-pattern} + ThresholdFilter: + level: ERROR + + Loggers: + # Root 로거 - 개발환경에서는 기본적으로 INFO 레벨 + Root: + level: INFO + AppenderRef: + - ref: console-appender + + Logger: + # 애플리케이션 로그 - 개발 시 모든 레벨 + Loki 전송 + - name: site.icebang + additivity: false + level: DEBUG + AppenderRef: + - ref: console-appender + - ref: loki-appender + - ref: file-dev-appender + - ref: file-error-appender + + # Spring Framework - 개발 시 필요한 정보만 + - name: org.springframework + additivity: false + level: INFO + AppenderRef: + - ref: console-appender + - ref: file-dev-appender + + # Spring Security - 인증 디버깅용 + - name: org.springframework.security + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + - ref: file-dev-appender + - ref: loki-appender + + # 웹 요청 로그 - API 개발 시 유용 + - name: org.springframework.web + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + - ref: file-dev-appender + - ref: loki-appender + + # 트랜잭션 로그 - DB 작업 디버깅 + - name: org.springframework.transaction + level: DEBUG + additivity: false + AppenderRef: + - ref: console-appender + - ref: file-dev-appender + - ref: loki-appender + + # HikariCP 로그 비활성화 + - name: com.zaxxer.hikari + level: OFF + + # SQL 로그 - 개발 시 쿼리 확인용 (필요시 활성화) + - name: org.hibernate.SQL + level: DEBUG + additivity: false + AppenderRef: + - ref: 
console-appender + + # 파라미터 바인딩 로그 (필요시 활성화) + - name: org.hibernate.type.descriptor.sql.BasicBinder + level: TRACE + additivity: false + AppenderRef: + - ref: console-appender \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java index 4976d0b8..dd5e0d1a 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java @@ -5,8 +5,12 @@ import org.springframework.context.annotation.Bean; import org.springframework.test.context.DynamicPropertyRegistry; import org.springframework.test.context.DynamicPropertySource; +import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.wait.strategy.Wait; import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; +import org.testcontainers.utility.DockerImageName; @TestConfiguration(proxyBeanMethods = false) public class E2eTestConfiguration { @@ -15,6 +19,11 @@ public ObjectMapper objectMapper() { return new ObjectMapper(); } + @Bean + public Network testNetwork() { + return Network.newNetwork(); + } + @Bean @ServiceConnection MariaDBContainer mariadbContainer() { @@ -24,8 +33,20 @@ MariaDBContainer mariadbContainer() { .withPassword("qwer1234"); } + @Bean + GenericContainer lokiContainer(Network network) { + return new GenericContainer<>(DockerImageName.parse("grafana/loki:2.9.0")) + .withNetwork(network) + .withNetworkAliases("loki") + .withExposedPorts(3100) + .withCommand("-config.file=/etc/loki/local-config.yaml") + .waitingFor(Wait.forHttp("/ready")) + .withStartupTimeout(java.time.Duration.ofMinutes(2)); + } + @DynamicPropertySource - static void 
configureProperties(DynamicPropertyRegistry registry, MariaDBContainer mariadb) { + static void configureProperties( + DynamicPropertyRegistry registry, MariaDBContainer mariadb, GenericContainer loki) { // MariaDB 연결 설정 registry.add("spring.datasource.url", mariadb::getJdbcUrl); registry.add("spring.datasource.username", mariadb::getUsername); @@ -39,5 +60,7 @@ static void configureProperties(DynamicPropertyRegistry registry, MariaDBContain registry.add("spring.hikari.maximum-pool-size", () -> "10"); registry.add("spring.hikari.minimum-idle", () -> "5"); registry.add("spring.hikari.pool-name", () -> "HikariCP-E2E"); + + System.setProperty("loki.port", String.valueOf(loki.getMappedPort(3100))); } } diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java index c2d10870..56a1259f 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java @@ -5,6 +5,7 @@ import org.springframework.boot.test.web.client.TestRestTemplate; import org.springframework.boot.test.web.server.LocalServerPort; import org.springframework.context.annotation.Import; +import org.springframework.test.context.jdbc.Sql; import org.springframework.test.web.servlet.MockMvc; import org.springframework.web.context.WebApplicationContext; import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; @@ -15,6 +16,7 @@ @Import(E2eTestConfiguration.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @E2eTest +@Sql(value = "classpath:sql/00-truncate.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) public abstract class E2eTestSupport { @Autowired protected TestRestTemplate restTemplate; From af62f527666ad52ec57781d2e2618c7e4fa349c5 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 16 Sep 2025 12:37:28 +0900 
Subject: [PATCH 25/57] =?UTF-8?q?feat:=20=EC=98=88=EC=8B=9C=20=EB=8D=B0?= =?UTF-8?q?=EC=9D=B4=ED=84=B0=EB=A1=9C=20GPT=ED=95=9C=ED=85=8C=20=EC=BD=98?= =?UTF-8?q?=ED=85=90=EC=B8=A0=20=EC=83=9D=EC=84=B1=20=ED=9B=84=20blogger?= =?UTF-8?q?=EC=97=90=20=EC=97=85=EB=A1=9C=EB=93=9C=20=EC=84=B1=EA=B3=B5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/service/keyword_service.py | 1 - .../service/product_blog_posting_service.py | 405 ++++++++++++++++++ apps/pre-processing-service/pyproject.toml | 1 + 3 files changed, 406 insertions(+), 1 deletion(-) create mode 100644 apps/pre-processing-service/app/service/product_blog_posting_service.py diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py index 575767ee..f8065fa3 100644 --- a/apps/pre-processing-service/app/service/keyword_service.py +++ b/apps/pre-processing-service/app/service/keyword_service.py @@ -1,4 +1,3 @@ -# Pydantic 모델을 가져오기 위해 schemas 파일 import import json import random diff --git a/apps/pre-processing-service/app/service/product_blog_posting_service.py b/apps/pre-processing-service/app/service/product_blog_posting_service.py new file mode 100644 index 00000000..6f728277 --- /dev/null +++ b/apps/pre-processing-service/app/service/product_blog_posting_service.py @@ -0,0 +1,405 @@ +# product_blog_posting_service.py +import json +import logging +import os +from datetime import datetime +from typing import Dict, List, Optional, Any +from dataclasses import dataclass +from enum import Enum + +import openai +from dotenv import load_dotenv + +from app.service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter +from app.errors.BlogPostingException import * + +# 환경변수 로드 +load_dotenv('.env.dev') + + +class PostingStatus(Enum): + PENDING = "pending" + PROCESSING = "processing" + SUCCESS = "success" + FAILED = "failed" + RETRY = "retry" + + +@dataclass +class ProductData: + """크롤링된 
상품 데이터 모델""" + tag: str + product_url: str + title: str + price: int + rating: float + options: List[Dict[str, Any]] + material_info: Dict[str, str] + product_images: List[str] + crawled_at: str + + @classmethod + def from_dict(cls, data: Dict) -> 'ProductData': + """딕셔너리에서 ProductData 객체 생성""" + product_detail = data.get('product_detail', {}) + return cls( + tag=data.get('tag', ''), + product_url=product_detail.get('url', ''), + title=product_detail.get('title', ''), + price=product_detail.get('price', 0), + rating=product_detail.get('rating', 0.0), + options=product_detail.get('options', []), + material_info=product_detail.get('material_info', {}), + product_images=product_detail.get('product_images', []), + crawled_at=data.get('crawled_at', '') + ) + + +@dataclass +class BlogPostContent: + """생성된 블로그 포스트 콘텐츠""" + title: str + content: str + tags: List[str] + + +@dataclass +class BlogContentRequest: + """블로그 콘텐츠 생성 요청""" + content_style: str = "informative" # "informative", "promotional", "review" + target_keywords: List[str] = None + include_pricing: bool = True + include_specifications: bool = True + content_length: str = "medium" # "short", "medium", "long" + + +class ProductContentGenerator: + """GPT를 활용한 상품 블로그 콘텐츠 생성""" + + def __init__(self): + # 환경변수에서 OpenAI API 키 로드 + self.openai_api_key = os.getenv('OPENAI_API_KEY') + if not self.openai_api_key: + raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.") + + openai.api_key = self.openai_api_key + + def generate_blog_content(self, product_data: ProductData, request: BlogContentRequest) -> BlogPostContent: + """상품 데이터를 기반으로 블로그 콘텐츠 생성""" + + # 1. 상품 정보 정리 + product_info = self._format_product_info(product_data, request) + + # 2. 프롬프트 생성 + prompt = self._create_blog_prompt(product_info, request) + + # 3. GPT를 통한 콘텐츠 생성 + try: + response = openai.ChatCompletion.create( + model="gpt-3.5-turbo", + messages=[ + { + "role": "system", + "content": "당신은 전문적인 블로그 콘텐츠 작성자입니다. 
상품 리뷰와 정보성 콘텐츠를 매력적이고 SEO 친화적으로 작성합니다." + }, + { + "role": "user", + "content": prompt + } + ], + temperature=0.7, + max_tokens=2000 + ) + + generated_content = response.choices[0].message.content + + # 4. 콘텐츠 파싱 및 구조화 + return self._parse_generated_content(generated_content, product_data, request) + + except Exception as e: + logging.error(f"콘텐츠 생성 실패: {e}") + return self._create_fallback_content(product_data, request) + + def _format_product_info(self, product_data: ProductData, request: BlogContentRequest) -> str: + """상품 정보를 텍스트로 포맷팅""" + info_parts = [ + f"상품명: {product_data.title}", + ] + + # 가격 정보 추가 + if request.include_pricing and product_data.price: + info_parts.append(f"가격: {product_data.price:,}원") + + # 평점 정보 추가 + if product_data.rating: + info_parts.append(f"평점: {product_data.rating}/5.0") + + # 사양 정보 추가 + if request.include_specifications and product_data.material_info: + info_parts.append("\n상품 사양:") + for key, value in product_data.material_info.items(): + info_parts.append(f"- {key}: {value}") + + # 옵션 정보 추가 + if product_data.options: + info_parts.append(f"\n구매 옵션 ({len(product_data.options)}개):") + for i, option in enumerate(product_data.options[:5], 1): # 처음 5개만 + info_parts.append(f"{i}. 
{option.get('name', 'N/A')}") + + # 구매 링크 + if product_data.product_url: + info_parts.append(f"\n구매 링크: {product_data.product_url}") + + return "\n".join(info_parts) + + def _create_blog_prompt(self, product_info: str, request: BlogContentRequest) -> str: + """블로그 작성용 프롬프트 생성""" + + # 스타일별 가이드라인 + style_guidelines = { + "informative": "객관적이고 상세한 정보 제공 중심으로, 독자가 제품을 이해할 수 있도록 전문적으로 작성", + "promotional": "제품의 장점과 매력을 강조하며, 구매 의욕을 자극할 수 있도록 매력적으로 작성", + "review": "실제 사용 경험을 바탕으로 한 솔직한 평가와 추천 중심으로 작성" + } + + # 길이별 가이드라인 + length_guidelines = { + "short": "800자 내외의 간결한 내용", + "medium": "1200자 내외의 적당한 길이", + "long": "1500자 이상의 상세한 내용" + } + + style_guide = style_guidelines.get(request.content_style, style_guidelines["informative"]) + length_guide = length_guidelines.get(request.content_length, length_guidelines["medium"]) + + # 키워드 정보 + keywords_text = "" + if request.target_keywords: + keywords_text = f"\n포함할 키워드: {', '.join(request.target_keywords)}" + + prompt = f""" +다음 상품 정보를 바탕으로 매력적인 블로그 포스트를 작성해주세요. + +상품 정보: +{product_info} + +작성 가이드라인: +- 스타일: {style_guide} +- 길이: {length_guide} +- 톤: 친근하면서도 신뢰할 수 있는, 정보 제공 중심{keywords_text} + +작성 요구사항: +1. SEO 친화적이고 클릭하고 싶은 매력적인 제목 +2. 독자의 관심을 끄는 도입부 +3. 상품의 핵심 특징과 장점을 구체적으로 설명 +4. 실제 사용 시나리오나 활용 팁 +5. 구매 결정에 도움이 되는 정보 +6. 자연스러운 마무리 + +HTML 태그를 사용해서 구조화된 콘텐츠로 작성해주세요. +(예:

,

,

,

    ,
  • 등) +""" + + return prompt + + def _parse_generated_content(self, content: str, product_data: ProductData, + request: BlogContentRequest) -> BlogPostContent: + """생성된 콘텐츠를 파싱하여 구조화""" + + # 제목 추출 (첫 번째 h1이나 강조된 줄) + lines = content.strip().split('\n') + title = product_data.title # 기본값 + + for line in lines[:10]: # 처음 10줄에서 제목 찾기 + clean_line = line.strip().replace('#', '').replace('

    ', '').replace('

    ', '') + if clean_line and len(clean_line) > 5 and ('제목' in line or '

    ' in line or line.startswith('#')): + title = clean_line + break + elif clean_line and len(clean_line) > 10 and len(clean_line) < 100: + # 적당한 길이의 첫 번째 줄을 제목으로 + title = clean_line + break + + # 태그 생성 + tags = self._generate_tags_from_product(product_data, request) + + return BlogPostContent( + title=title, + content=content, + tags=tags + ) + + def _generate_tags_from_product(self, product_data: ProductData, request: BlogContentRequest) -> List[str]: + """상품 정보 기반 태그 생성""" + tags = [] + + # 사용자 지정 키워드가 있으면 우선 추가 + if request.target_keywords: + tags.extend(request.target_keywords[:5]) + + # 기본 태그 추가 + if product_data.tag: + tags.append(product_data.tag) + + # 제품 타입 추론해서 태그 추가 + title_lower = product_data.title.lower() + if any(word in title_lower for word in ["iphone", "아이폰", "phone"]): + tags.extend(["아이폰", "스마트폰"]) + if any(word in title_lower for word in ["필름", "보호", "강화"]): + tags.extend(["보호필름", "강화필름"]) + if any(word in title_lower for word in ["케이스", "커버"]): + tags.extend(["폰케이스", "액세서리"]) + + # 재료 정보에서 태그 생성 + if product_data.material_info: + for key, value in product_data.material_info.items(): + if value and len(value.strip()) <= 20: # 너무 긴 값은 제외 + clean_value = value.strip() + if clean_value not in tags: + tags.append(clean_value) + + # 중복 제거 및 개수 제한 + unique_tags = [] + for tag in tags: + if tag not in unique_tags and len(unique_tags) < 10: + unique_tags.append(tag) + + return unique_tags + + def _create_fallback_content(self, product_data: ProductData, request: BlogContentRequest) -> BlogPostContent: + """콘텐츠 생성 실패 시 대안 콘텐츠 생성""" + title = f"{product_data.title} - 상품 정보 및 구매 가이드" + + content = f""" +

    {product_data.title}

    + +

    상품 소개

    +

    {product_data.title}에 대한 상세한 정보를 소개합니다.

    + +

    가격 정보

    +

    판매가: {product_data.price:,}원

    +""" + + if product_data.material_info: + content += "

    상품 사양

    \n
      \n" + for key, value in product_data.material_info.items(): + content += f"
    • {key}: {value}
    • \n" + content += "
    \n" + + if product_data.options: + content += f"

    구매 옵션 ({len(product_data.options)}가지)

    \n
      \n" + for option in product_data.options[:5]: + content += f"
    • {option.get('name', 'N/A')}
    • \n" + content += "
    \n" + + content += f""" +

    구매 안내

    +

    상품 구매는 여기에서 가능합니다.

    +""" + + return BlogPostContent( + title=title, + content=content, + tags=[product_data.tag] if product_data.tag else ["상품정보"] + ) + + +class ProductBlogPostingService: + """상품 데이터를 Blogger에 포스팅하는 메인 서비스""" + + def __init__(self): + self.content_generator = ProductContentGenerator() + self.blogger_service = BloggerBlogPostAdapter() + + def post_product_to_blogger(self, product_data_dict: Dict, request: BlogContentRequest) -> Dict[str, Any]: + """상품 데이터를 Blogger에 포스팅""" + try: + # 1. 상품 데이터 파싱 + product_data = ProductData.from_dict(product_data_dict) + + # 2. GPT를 통한 콘텐츠 생성 + blog_content = self.content_generator.generate_blog_content(product_data, request) + + # 3. Blogger에 포스팅 + self.blogger_service.post_content( + title=blog_content.title, + content=blog_content.content, + tags=blog_content.tags + ) + + return { + "status": "success", + "platform": "blogger", + "title": blog_content.title, + "tags": blog_content.tags, + "posted_at": datetime.now().isoformat(), + "product_tag": product_data.tag + } + + except Exception as e: + logging.error(f"Blogger 포스팅 실패: {e}") + return { + "status": "failed", + "error": str(e), + "platform": "blogger", + "attempted_at": datetime.now().isoformat(), + "product_tag": product_data_dict.get("tag", "unknown") + } + + def batch_post_products(self, products_data: List[Dict], request: BlogContentRequest) -> List[Dict[str, Any]]: + """여러 상품을 일괄 포스팅""" + results = [] + + for product_data in products_data: + result = self.post_product_to_blogger(product_data, request) + results.append(result) + + # API 호출 제한을 고려한 딜레이 + import time + time.sleep(3) # 3초 대기 + + return results + + +# 사용 예시 +if __name__ == "__main__": + # 크롤링된 상품 데이터 + sample_product_data = { + "tag": "test001", + "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", + "status": "success", + "product_detail": { + "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", + "title": "코닝 적용 가능한 애플 13 강화 필름 iphone16/15promax 
휴대 전화 필름 애플 11 안티-peep 및 먼지없는 빈", + "price": 430, + "rating": 5.0, + "options": [ + {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨초투명]", "stock": 0}, + {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨훔쳐보기 방지]", "stock": 0} + ], + "material_info": { + "상표": "다른", + "재료": "강화 유리", + "필름 종류": "전막", + "크기": "애플 16프로맥스( 6.9inch )", + "적용 모델": "iPhone13 Pro Max" + }, + "product_images": [] + }, + "crawled_at": "2025-09-16 11:49:24" + } + + # 서비스 초기화 (환경변수에서 자동으로 API 키 로드) + service = ProductBlogPostingService() + + # 블로그 포스팅 요청 설정 + blog_request = BlogContentRequest( + content_style="informative", # "informative", "promotional", "review" + target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], + include_pricing=True, + content_length="medium" + ) + + # 블로그 포스팅 실행 + result = service.post_product_to_blogger(sample_product_data, blog_request) + print(json.dumps(result, indent=2, ensure_ascii=False)) \ No newline at end of file diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index f8cabbff..42997650 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -34,6 +34,7 @@ google-api-python-client = "^2.181.0" poetry-core=">=2.1.3,<3.0.0" dbutils=">=3.1.2,<4.0.0" onnxruntime = "^1.22.1" +openai = "^1.107.3" [build-system] requires = ["poetry-core>=2.0.0,<3.0.0"] From 64c78c74d97748dbeb1ba651fd9eaa23d4ac34e6 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 16 Sep 2025 14:01:47 +0900 Subject: [PATCH 26/57] =?UTF-8?q?feat:=20rag=EB=A1=9C=20=EC=BD=98=ED=85=90?= =?UTF-8?q?=EC=B8=A0=20=EC=83=9D=EC=84=B1=20=ED=9B=84=20blogger=20?= =?UTF-8?q?=ED=8F=AC=EC=8A=A4=ED=8C=85=20=ED=85=8C=EC=8A=A4=ED=8A=B8=20?= =?UTF-8?q?=ED=86=B5=EA=B3=BC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../pre-processing-service/app/core/config.py | 3 + .../service/product_blog_posting_service.py | 101 +++------- .../test/test_product_blog_posting_service.py | 80 ++++++++ 
.../app/utils/crawling_util.py | 2 +- apps/pre-processing-service/poetry.lock | 172 ++++++++++++++++-- 5 files changed, 274 insertions(+), 84 deletions(-) create mode 100644 apps/pre-processing-service/app/test/test_product_blog_posting_service.py diff --git a/apps/pre-processing-service/app/core/config.py b/apps/pre-processing-service/app/core/config.py index ed54cc69..69e29d35 100644 --- a/apps/pre-processing-service/app/core/config.py +++ b/apps/pre-processing-service/app/core/config.py @@ -80,6 +80,9 @@ class BaseSettingsConfig(BaseSettings): # MeCab 사전 경로 (자동 감지) mecab_path: Optional[str] = None + # 테스트/추가용 필드 + openai_api_key: Optional[str] = None # << 이 부분 추가 + def __init__(self, **kwargs): super().__init__(**kwargs) diff --git a/apps/pre-processing-service/app/service/product_blog_posting_service.py b/apps/pre-processing-service/app/service/product_blog_posting_service.py index 6f728277..fa947855 100644 --- a/apps/pre-processing-service/app/service/product_blog_posting_service.py +++ b/apps/pre-processing-service/app/service/product_blog_posting_service.py @@ -1,4 +1,3 @@ -# product_blog_posting_service.py import json import logging import os @@ -7,7 +6,7 @@ from dataclasses import dataclass from enum import Enum -import openai +from openai import OpenAI from dotenv import load_dotenv from app.service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter @@ -16,6 +15,7 @@ # 환경변수 로드 load_dotenv('.env.dev') +client = OpenAI() class PostingStatus(Enum): PENDING = "pending" @@ -82,7 +82,7 @@ def __init__(self): if not self.openai_api_key: raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.") - openai.api_key = self.openai_api_key + client.api_key = self.openai_api_key def generate_blog_content(self, product_data: ProductData, request: BlogContentRequest) -> BlogPostContent: """상품 데이터를 기반으로 블로그 콘텐츠 생성""" @@ -95,8 +95,9 @@ def generate_blog_content(self, product_data: ProductData, request: BlogContentR # 3. 
GPT를 통한 콘텐츠 생성 try: - response = openai.ChatCompletion.create( - model="gpt-3.5-turbo", + + response = client.chat.completions.create( + model="gpt-4o-mini", messages=[ { "role": "system", @@ -194,9 +195,11 @@ def _create_blog_prompt(self, product_info: str, request: BlogContentRequest) -> 3. 상품의 핵심 특징과 장점을 구체적으로 설명 4. 실제 사용 시나리오나 활용 팁 5. 구매 결정에 도움이 되는 정보 -6. 자연스러운 마무리 -HTML 태그를 사용해서 구조화된 콘텐츠로 작성해주세요. +⚠️ 주의: +- 절대로 마지막에 '이 HTML 구조는…' 같은 자기 평가 문장을 추가하지 마세요. +- 출력 시 ```나 ```html 같은 코드 블록 구문을 포함하지 마세요. +- 오직 HTML 태그만 사용하여 구조화된 콘텐츠를 작성해주세요. (예:

    ,

    ,

    ,

      ,
    • 등) """ @@ -303,7 +306,6 @@ def _create_fallback_content(self, product_data: ProductData, request: BlogConte tags=[product_data.tag] if product_data.tag else ["상품정보"] ) - class ProductBlogPostingService: """상품 데이터를 Blogger에 포스팅하는 메인 서비스""" @@ -311,22 +313,20 @@ def __init__(self): self.content_generator = ProductContentGenerator() self.blogger_service = BloggerBlogPostAdapter() - def post_product_to_blogger(self, product_data_dict: Dict, request: BlogContentRequest) -> Dict[str, Any]: + def post_product_to_blogger(self, product_data: ProductData, request: BlogContentRequest) -> dict: """상품 데이터를 Blogger에 포스팅""" try: - # 1. 상품 데이터 파싱 - product_data = ProductData.from_dict(product_data_dict) - - # 2. GPT를 통한 콘텐츠 생성 + # 1. GPT를 통한 콘텐츠 생성 blog_content = self.content_generator.generate_blog_content(product_data, request) - # 3. Blogger에 포스팅 + # 2. Blogger에 포스팅 self.blogger_service.post_content( title=blog_content.title, content=blog_content.content, tags=blog_content.tags ) + # 3. 성공 결과 반환 return { "status": "success", "platform": "blogger", @@ -338,68 +338,25 @@ def post_product_to_blogger(self, product_data_dict: Dict, request: BlogContentR except Exception as e: logging.error(f"Blogger 포스팅 실패: {e}") + # ProductData 객체 기준으로 처리 return { "status": "failed", "error": str(e), "platform": "blogger", "attempted_at": datetime.now().isoformat(), - "product_tag": product_data_dict.get("tag", "unknown") + "product_tag": getattr(product_data, "tag", "unknown") } - def batch_post_products(self, products_data: List[Dict], request: BlogContentRequest) -> List[Dict[str, Any]]: - """여러 상품을 일괄 포스팅""" - results = [] - - for product_data in products_data: - result = self.post_product_to_blogger(product_data, request) - results.append(result) - - # API 호출 제한을 고려한 딜레이 - import time - time.sleep(3) # 3초 대기 - - return results - - -# 사용 예시 -if __name__ == "__main__": - # 크롤링된 상품 데이터 - sample_product_data = { - "tag": "test001", - "product_url": 
"https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", - "status": "success", - "product_detail": { - "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", - "title": "코닝 적용 가능한 애플 13 강화 필름 iphone16/15promax 휴대 전화 필름 애플 11 안티-peep 및 먼지없는 빈", - "price": 430, - "rating": 5.0, - "options": [ - {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨초투명]", "stock": 0}, - {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨훔쳐보기 방지]", "stock": 0} - ], - "material_info": { - "상표": "다른", - "재료": "강화 유리", - "필름 종류": "전막", - "크기": "애플 16프로맥스( 6.9inch )", - "적용 모델": "iPhone13 Pro Max" - }, - "product_images": [] - }, - "crawled_at": "2025-09-16 11:49:24" - } - - # 서비스 초기화 (환경변수에서 자동으로 API 키 로드) - service = ProductBlogPostingService() - - # 블로그 포스팅 요청 설정 - blog_request = BlogContentRequest( - content_style="informative", # "informative", "promotional", "review" - target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], - include_pricing=True, - content_length="medium" - ) - - # 블로그 포스팅 실행 - result = service.post_product_to_blogger(sample_product_data, blog_request) - print(json.dumps(result, indent=2, ensure_ascii=False)) \ No newline at end of file + # def batch_post_products(self, products_data: List[Dict], request: BlogContentRequest) -> List[Dict[str, Any]]: + # """여러 상품을 일괄 포스팅""" + # results = [] + # + # for product_data in products_data: + # result = self.post_product_to_blogger(product_data, request) + # results.append(result) + # + # # API 호출 제한을 고려한 딜레이 + # import time + # time.sleep(3) # 3초 대기 + # + # return results \ No newline at end of file diff --git a/apps/pre-processing-service/app/test/test_product_blog_posting_service.py b/apps/pre-processing-service/app/test/test_product_blog_posting_service.py new file mode 100644 index 00000000..2757eb14 --- /dev/null +++ b/apps/pre-processing-service/app/test/test_product_blog_posting_service.py @@ -0,0 +1,80 @@ +import pytest +from app.service.product_blog_posting_service import ( + ProductBlogPostingService, 
BlogContentRequest, ProductData +) + +# 샘플 데이터 +sample_product_data = { + "tag": "test001", + "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", + "status": "success", + "product_detail": { + "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", + "title": "코닝 적용 가능한 애플 13 강화 필름 iphone16/15promax 휴대 전화 필름 애플 11 안티-peep 및 먼지없는 빈", + "price": 430, + "rating": 5.0, + "options": [ + {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨초투명]", "stock": 0}, + {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨훔쳐보기 방지]", "stock": 0} + ], + "material_info": { + "상표": "다른", + "재료": "강화 유리", + "필름 종류": "전막", + "크기": "애플 16프로맥스( 6.9inch )", + "적용 모델": "iPhone13 Pro Max" + }, + "product_images": [] + }, + "crawled_at": "2025-09-16 11:49:24" +} + + +@pytest.fixture +def blog_service(): + return ProductBlogPostingService() + + +def test_generate_blog_content(blog_service): + """GPT를 통한 블로그 콘텐츠 생성 테스트""" + request = BlogContentRequest( + content_style="informative", + target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], + include_pricing=True, + content_length="medium" + ) + + product_obj = ProductData.from_dict(sample_product_data) + + # 순수 콘텐츠 생성만 테스트 + blog_content = blog_service.content_generator.generate_blog_content(product_obj, request) + + assert blog_content.title + assert "

      " in blog_content.content + assert len(blog_content.tags) > 0 + + +def test_post_product_to_blogger(blog_service, monkeypatch): + """Blogger 포스팅 테스트 (실제 API 호출을 막고 mock)""" + + class MockBloggerAdapter: + def post_content(self, title, content, tags): + return {"mock": True} + + monkeypatch.setattr(blog_service, "blogger_service", MockBloggerAdapter()) + + request = BlogContentRequest( + content_style="informative", + target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], + include_pricing=True, + content_length="medium" + ) + + product_obj = ProductData.from_dict(sample_product_data) + + result = blog_service.post_product_to_blogger(product_obj, request) + + assert result["status"] == "success" + assert result["platform"] == "blogger" + assert "title" in result + assert "tags" in result diff --git a/apps/pre-processing-service/app/utils/crawling_util.py b/apps/pre-processing-service/app/utils/crawling_util.py index 315df32a..5e50528d 100644 --- a/apps/pre-processing-service/app/utils/crawling_util.py +++ b/apps/pre-processing-service/app/utils/crawling_util.py @@ -10,7 +10,7 @@ class CrawlingUtil: 블로그 포스팅과 상품 크롤링 모두 지원 """ - def __init__(self, headless: bool = False, for_blog_posting: bool = False): + def __init__(self, headless: bool = True, for_blog_posting: bool = False): """ :param headless: 헤드리스 모드 사용 여부 :param for_blog_posting: 블로그 포스팅용 설정 사용 여부 diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 49d36b65..d47e6783 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -473,6 +473,18 @@ docs = ["docutils"] pg = ["PyGreSQL (>=5)"] tests = ["pytest (>=7)", "ruff"] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = 
"sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "fastapi" version = "0.116.1" @@ -856,6 +868,94 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "jiter" +version = "0.11.0" +description = "Fast iterable JSON parser." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jiter-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3893ce831e1c0094a83eeaf56c635a167d6fa8cc14393cc14298fd6fdc2a2449"}, + {file = "jiter-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:25c625b9b61b5a8725267fdf867ef2e51b429687f6a4eef211f4612e95607179"}, + {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4ca85fb6a62cf72e1c7f5e34ddef1b660ce4ed0886ec94a1ef9777d35eaa1f"}, + {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:572208127034725e79c28437b82414028c3562335f2b4f451d98136d0fc5f9cd"}, + {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494ba627c7f550ad3dabb21862864b8f2216098dc18ff62f37b37796f2f7c325"}, + {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8da18a99f58bca3ecc2d2bba99cac000a924e115b6c4f0a2b98f752b6fbf39a"}, + {file = "jiter-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ffd3b0fff3fabbb02cc09910c08144db6bb5697a98d227a074401e01ee63dd"}, + {file = "jiter-0.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fe6530aa738a4f7d4e4702aa8f9581425d04036a5f9e25af65ebe1f708f23be"}, + {file = "jiter-0.11.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:e35d66681c133a03d7e974e7eedae89720fe8ca3bd09f01a4909b86a8adf31f5"}, + {file = "jiter-0.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c59459beca2fbc9718b6f1acb7bfb59ebc3eb4294fa4d40e9cb679dafdcc6c60"}, + {file = "jiter-0.11.0-cp310-cp310-win32.whl", hash = "sha256:b7b0178417b0dcfc5f259edbc6db2b1f5896093ed9035ee7bab0f2be8854726d"}, + {file = "jiter-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:11df2bf99fb4754abddd7f5d940a48e51f9d11624d6313ca4314145fcad347f0"}, + {file = "jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222"}, + {file = "jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d"}, + {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7"}, + {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d"}, + {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09"}, + {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789"}, + {file = "jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347"}, + {file = "jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648"}, + {file = "jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4"}, + {file = "jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1"}, + {file = "jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982"}, + {file = "jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7"}, + {file = "jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada"}, + {file = "jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99"}, + {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6"}, + {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1"}, + {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4"}, + {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72"}, + {file = "jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591"}, + {file = "jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09"}, + {file = "jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5"}, + {file = "jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206"}, + {file = "jiter-0.11.0-cp312-cp312-win32.whl", hash = 
"sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b"}, + {file = "jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c"}, + {file = "jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb"}, + {file = "jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471"}, + {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd"}, + {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921"}, + {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df"}, + {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982"}, + {file = "jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64"}, + {file = "jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1"}, + {file = "jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758"}, + {file = "jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166"}, + {file = "jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80"}, + {file = "jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6"}, + {file = "jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33"}, + {file = "jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03"}, + {file = "jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba"}, + {file = "jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72"}, + {file = "jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774"}, + {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0"}, + {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a"}, + {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773"}, + {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7"}, + {file = "jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2"}, + {file = "jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2"}, + {file = "jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0"}, + {file = 
"jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73"}, + {file = "jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2"}, + {file = "jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40"}, + {file = "jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406"}, + {file = "jiter-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:719891c2fb7628a41adff4f2f54c19380a27e6fdfdb743c24680ef1a54c67bd0"}, + {file = "jiter-0.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df7f1927cbdf34cb91262a5418ca06920fd42f1cf733936d863aeb29b45a14ef"}, + {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e71ae6d969d0c9bab336c5e9e2fabad31e74d823f19e3604eaf96d9a97f463df"}, + {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5661469a7b2be25ade3a4bb6c21ffd1e142e13351a0759f264dfdd3ad99af1ab"}, + {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76c15ef0d3d02f8b389066fa4c410a0b89e9cc6468a1f0674c5925d2f3c3e890"}, + {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63782a1350917a27817030716566ed3d5b3c731500fd42d483cbd7094e2c5b25"}, + {file = "jiter-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a7092b699646a1ddc03a7b112622d9c066172627c7382659befb0d2996f1659"}, + {file = "jiter-0.11.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f637b8e818f6d75540f350a6011ce21252573c0998ea1b4365ee54b7672c23c5"}, + {file = "jiter-0.11.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a624d87719e1b5d09c15286eaee7e1532a40c692a096ea7ca791121365f548c1"}, 
+ {file = "jiter-0.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9d0146d8d9b3995821bb586fc8256636258947c2f39da5bab709f3a28fb1a0b"}, + {file = "jiter-0.11.0-cp39-cp39-win32.whl", hash = "sha256:d067655a7cf0831eb8ec3e39cbd752995e9b69a2206df3535b3a067fac23b032"}, + {file = "jiter-0.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:f05d03775a11aaf132c447436983169958439f1219069abf24662a672851f94e"}, + {file = "jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7"}, + {file = "jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4"}, +] + [[package]] name = "joblib" version = "1.5.2" @@ -1107,6 +1207,34 @@ packaging = "*" protobuf = "*" sympy = "*" +[[package]] +name = "openai" +version = "1.107.3" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "openai-1.107.3-py3-none-any.whl", hash = "sha256:4ca54a847235ac04c6320da70fdc06b62d71439de9ec0aa40d5690c3064d4025"}, + {file = "openai-1.107.3.tar.gz", hash = "sha256:69bb8032b05c5f00f7660e422f70f9aabc94793b9a30c5f899360ed21e46314f"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<16)"] +voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] + [[package]] name = "outcome" version = "1.3.0.post0" @@ -1181,14 +1309,14 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "poetry-core" -version = "2.1.3" +version = "2.2.0" description = "Poetry PEP 517 Build Backend" optional = false python-versions = 
"<4.0,>=3.9" groups = ["main"] files = [ - {file = "poetry_core-2.1.3-py3-none-any.whl", hash = "sha256:2c704f05016698a54ca1d327f46ce2426d72eaca6ff614132c8477c292266771"}, - {file = "poetry_core-2.1.3.tar.gz", hash = "sha256:0522a015477ed622c89aad56a477a57813cace0c8e7ff2a2906b7ef4a2e296a4"}, + {file = "poetry_core-2.2.0-py3-none-any.whl", hash = "sha256:0edea81d07e88cbd407369eef753c722da8ff1338f554788dc04636e756318fc"}, + {file = "poetry_core-2.2.0.tar.gz", hash = "sha256:b4033b71b99717a942030e074fec7e3082e5fde7a8ed10f02cd2413bdf940b1f"}, ] [[package]] @@ -1348,14 +1476,14 @@ files = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2"}, + {file = "pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2"}, ] [package.dependencies] @@ -1537,14 +1665,14 @@ rsa = ["cryptography"] [[package]] name = "pyparsing" -version = "3.2.3" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" +version = "3.2.4" +description = "pyparsing - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, - {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, + {file = "pyparsing-3.2.4-py3-none-any.whl", hash = 
"sha256:91d0fcde680d42cd031daf3a6ba20da3107e08a75de50da58360e7d94ab24d36"}, + {file = "pyparsing-3.2.4.tar.gz", hash = "sha256:fff89494f45559d0f2ce46613b419f632bbb6afbdaed49696d322bcf98a58e99"}, ] [package.extras] @@ -2019,6 +2147,28 @@ files = [ {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, ] +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "trio" version = "0.30.0" @@ -2186,4 +2336,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "9ca2813b1931948bd0600aa974faba9311a7cf00ea632ea4db5f9fcc80ebc518" +content-hash = "a2a9a376c855a2599db8f932f42029ee9f016babdf0f004cb53690caa8f5a330" From 4bffbae4a2b64a02425aa305696f895e92a72233 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Tue, 16 Sep 2025 14:11:48 +0900 Subject: [PATCH 27/57] feat: poetry run black --- .../service/product_blog_posting_service.py | 117 +++++++++++------- .../test/test_product_blog_posting_service.py | 23 ++-- 2 files changed, 86 insertions(+), 54 deletions(-) diff --git a/apps/pre-processing-service/app/service/product_blog_posting_service.py b/apps/pre-processing-service/app/service/product_blog_posting_service.py index fa947855..129c4666 100644 --- 
a/apps/pre-processing-service/app/service/product_blog_posting_service.py +++ b/apps/pre-processing-service/app/service/product_blog_posting_service.py @@ -13,10 +13,11 @@ from app.errors.BlogPostingException import * # 환경변수 로드 -load_dotenv('.env.dev') +load_dotenv(".env.dev") client = OpenAI() + class PostingStatus(Enum): PENDING = "pending" PROCESSING = "processing" @@ -28,6 +29,7 @@ class PostingStatus(Enum): @dataclass class ProductData: """크롤링된 상품 데이터 모델""" + tag: str product_url: str title: str @@ -39,25 +41,26 @@ class ProductData: crawled_at: str @classmethod - def from_dict(cls, data: Dict) -> 'ProductData': + def from_dict(cls, data: Dict) -> "ProductData": """딕셔너리에서 ProductData 객체 생성""" - product_detail = data.get('product_detail', {}) + product_detail = data.get("product_detail", {}) return cls( - tag=data.get('tag', ''), - product_url=product_detail.get('url', ''), - title=product_detail.get('title', ''), - price=product_detail.get('price', 0), - rating=product_detail.get('rating', 0.0), - options=product_detail.get('options', []), - material_info=product_detail.get('material_info', {}), - product_images=product_detail.get('product_images', []), - crawled_at=data.get('crawled_at', '') + tag=data.get("tag", ""), + product_url=product_detail.get("url", ""), + title=product_detail.get("title", ""), + price=product_detail.get("price", 0), + rating=product_detail.get("rating", 0.0), + options=product_detail.get("options", []), + material_info=product_detail.get("material_info", {}), + product_images=product_detail.get("product_images", []), + crawled_at=data.get("crawled_at", ""), ) @dataclass class BlogPostContent: """생성된 블로그 포스트 콘텐츠""" + title: str content: str tags: List[str] @@ -66,6 +69,7 @@ class BlogPostContent: @dataclass class BlogContentRequest: """블로그 콘텐츠 생성 요청""" + content_style: str = "informative" # "informative", "promotional", "review" target_keywords: List[str] = None include_pricing: bool = True @@ -78,13 +82,15 @@ class 
ProductContentGenerator: def __init__(self): # 환경변수에서 OpenAI API 키 로드 - self.openai_api_key = os.getenv('OPENAI_API_KEY') + self.openai_api_key = os.getenv("OPENAI_API_KEY") if not self.openai_api_key: raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.") client.api_key = self.openai_api_key - def generate_blog_content(self, product_data: ProductData, request: BlogContentRequest) -> BlogPostContent: + def generate_blog_content( + self, product_data: ProductData, request: BlogContentRequest + ) -> BlogPostContent: """상품 데이터를 기반으로 블로그 콘텐츠 생성""" # 1. 상품 정보 정리 @@ -101,27 +107,28 @@ def generate_blog_content(self, product_data: ProductData, request: BlogContentR messages=[ { "role": "system", - "content": "당신은 전문적인 블로그 콘텐츠 작성자입니다. 상품 리뷰와 정보성 콘텐츠를 매력적이고 SEO 친화적으로 작성합니다." + "content": "당신은 전문적인 블로그 콘텐츠 작성자입니다. 상품 리뷰와 정보성 콘텐츠를 매력적이고 SEO 친화적으로 작성합니다.", }, - { - "role": "user", - "content": prompt - } + {"role": "user", "content": prompt}, ], temperature=0.7, - max_tokens=2000 + max_tokens=2000, ) generated_content = response.choices[0].message.content # 4. 
콘텐츠 파싱 및 구조화 - return self._parse_generated_content(generated_content, product_data, request) + return self._parse_generated_content( + generated_content, product_data, request + ) except Exception as e: logging.error(f"콘텐츠 생성 실패: {e}") return self._create_fallback_content(product_data, request) - def _format_product_info(self, product_data: ProductData, request: BlogContentRequest) -> str: + def _format_product_info( + self, product_data: ProductData, request: BlogContentRequest + ) -> str: """상품 정보를 텍스트로 포맷팅""" info_parts = [ f"상품명: {product_data.title}", @@ -153,25 +160,31 @@ def _format_product_info(self, product_data: ProductData, request: BlogContentRe return "\n".join(info_parts) - def _create_blog_prompt(self, product_info: str, request: BlogContentRequest) -> str: + def _create_blog_prompt( + self, product_info: str, request: BlogContentRequest + ) -> str: """블로그 작성용 프롬프트 생성""" # 스타일별 가이드라인 style_guidelines = { "informative": "객관적이고 상세한 정보 제공 중심으로, 독자가 제품을 이해할 수 있도록 전문적으로 작성", "promotional": "제품의 장점과 매력을 강조하며, 구매 의욕을 자극할 수 있도록 매력적으로 작성", - "review": "실제 사용 경험을 바탕으로 한 솔직한 평가와 추천 중심으로 작성" + "review": "실제 사용 경험을 바탕으로 한 솔직한 평가와 추천 중심으로 작성", } # 길이별 가이드라인 length_guidelines = { "short": "800자 내외의 간결한 내용", "medium": "1200자 내외의 적당한 길이", - "long": "1500자 이상의 상세한 내용" + "long": "1500자 이상의 상세한 내용", } - style_guide = style_guidelines.get(request.content_style, style_guidelines["informative"]) - length_guide = length_guidelines.get(request.content_length, length_guidelines["medium"]) + style_guide = style_guidelines.get( + request.content_style, style_guidelines["informative"] + ) + length_guide = length_guidelines.get( + request.content_length, length_guidelines["medium"] + ) # 키워드 정보 keywords_text = "" @@ -205,17 +218,24 @@ def _create_blog_prompt(self, product_info: str, request: BlogContentRequest) -> return prompt - def _parse_generated_content(self, content: str, product_data: ProductData, - request: BlogContentRequest) -> BlogPostContent: + def 
_parse_generated_content( + self, content: str, product_data: ProductData, request: BlogContentRequest + ) -> BlogPostContent: """생성된 콘텐츠를 파싱하여 구조화""" # 제목 추출 (첫 번째 h1이나 강조된 줄) - lines = content.strip().split('\n') + lines = content.strip().split("\n") title = product_data.title # 기본값 for line in lines[:10]: # 처음 10줄에서 제목 찾기 - clean_line = line.strip().replace('#', '').replace('

      ', '').replace('

      ', '') - if clean_line and len(clean_line) > 5 and ('제목' in line or '

      ' in line or line.startswith('#')): + clean_line = ( + line.strip().replace("#", "").replace("

      ", "").replace("

      ", "") + ) + if ( + clean_line + and len(clean_line) > 5 + and ("제목" in line or "

      " in line or line.startswith("#")) + ): title = clean_line break elif clean_line and len(clean_line) > 10 and len(clean_line) < 100: @@ -226,13 +246,11 @@ def _parse_generated_content(self, content: str, product_data: ProductData, # 태그 생성 tags = self._generate_tags_from_product(product_data, request) - return BlogPostContent( - title=title, - content=content, - tags=tags - ) + return BlogPostContent(title=title, content=content, tags=tags) - def _generate_tags_from_product(self, product_data: ProductData, request: BlogContentRequest) -> List[str]: + def _generate_tags_from_product( + self, product_data: ProductData, request: BlogContentRequest + ) -> List[str]: """상품 정보 기반 태그 생성""" tags = [] @@ -269,7 +287,9 @@ def _generate_tags_from_product(self, product_data: ProductData, request: BlogCo return unique_tags - def _create_fallback_content(self, product_data: ProductData, request: BlogContentRequest) -> BlogPostContent: + def _create_fallback_content( + self, product_data: ProductData, request: BlogContentRequest + ) -> BlogPostContent: """콘텐츠 생성 실패 시 대안 콘텐츠 생성""" title = f"{product_data.title} - 상품 정보 및 구매 가이드" @@ -303,9 +323,10 @@ def _create_fallback_content(self, product_data: ProductData, request: BlogConte return BlogPostContent( title=title, content=content, - tags=[product_data.tag] if product_data.tag else ["상품정보"] + tags=[product_data.tag] if product_data.tag else ["상품정보"], ) + class ProductBlogPostingService: """상품 데이터를 Blogger에 포스팅하는 메인 서비스""" @@ -313,17 +334,21 @@ def __init__(self): self.content_generator = ProductContentGenerator() self.blogger_service = BloggerBlogPostAdapter() - def post_product_to_blogger(self, product_data: ProductData, request: BlogContentRequest) -> dict: + def post_product_to_blogger( + self, product_data: ProductData, request: BlogContentRequest + ) -> dict: """상품 데이터를 Blogger에 포스팅""" try: # 1. 
GPT를 통한 콘텐츠 생성 - blog_content = self.content_generator.generate_blog_content(product_data, request) + blog_content = self.content_generator.generate_blog_content( + product_data, request + ) # 2. Blogger에 포스팅 self.blogger_service.post_content( title=blog_content.title, content=blog_content.content, - tags=blog_content.tags + tags=blog_content.tags, ) # 3. 성공 결과 반환 @@ -333,7 +358,7 @@ def post_product_to_blogger(self, product_data: ProductData, request: BlogConten "title": blog_content.title, "tags": blog_content.tags, "posted_at": datetime.now().isoformat(), - "product_tag": product_data.tag + "product_tag": product_data.tag, } except Exception as e: @@ -344,7 +369,7 @@ def post_product_to_blogger(self, product_data: ProductData, request: BlogConten "error": str(e), "platform": "blogger", "attempted_at": datetime.now().isoformat(), - "product_tag": getattr(product_data, "tag", "unknown") + "product_tag": getattr(product_data, "tag", "unknown"), } # def batch_post_products(self, products_data: List[Dict], request: BlogContentRequest) -> List[Dict[str, Any]]: @@ -359,4 +384,4 @@ def post_product_to_blogger(self, product_data: ProductData, request: BlogConten # import time # time.sleep(3) # 3초 대기 # - # return results \ No newline at end of file + # return results diff --git a/apps/pre-processing-service/app/test/test_product_blog_posting_service.py b/apps/pre-processing-service/app/test/test_product_blog_posting_service.py index 2757eb14..c5b1efde 100644 --- a/apps/pre-processing-service/app/test/test_product_blog_posting_service.py +++ b/apps/pre-processing-service/app/test/test_product_blog_posting_service.py @@ -1,6 +1,8 @@ import pytest from app.service.product_blog_posting_service import ( - ProductBlogPostingService, BlogContentRequest, ProductData + ProductBlogPostingService, + BlogContentRequest, + ProductData, ) # 샘플 데이터 @@ -15,18 +17,21 @@ "rating": 5.0, "options": [ {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨초투명]", "stock": 0}, - {"name": "먼지 없는 창고 2차 필름 [코닝글라스 
방폭丨훔쳐보기 방지]", "stock": 0} + { + "name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨훔쳐보기 방지]", + "stock": 0, + }, ], "material_info": { "상표": "다른", "재료": "강화 유리", "필름 종류": "전막", "크기": "애플 16프로맥스( 6.9inch )", - "적용 모델": "iPhone13 Pro Max" + "적용 모델": "iPhone13 Pro Max", }, - "product_images": [] + "product_images": [], }, - "crawled_at": "2025-09-16 11:49:24" + "crawled_at": "2025-09-16 11:49:24", } @@ -41,13 +46,15 @@ def test_generate_blog_content(blog_service): content_style="informative", target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], include_pricing=True, - content_length="medium" + content_length="medium", ) product_obj = ProductData.from_dict(sample_product_data) # 순수 콘텐츠 생성만 테스트 - blog_content = blog_service.content_generator.generate_blog_content(product_obj, request) + blog_content = blog_service.content_generator.generate_blog_content( + product_obj, request + ) assert blog_content.title assert "

      " in blog_content.content @@ -67,7 +74,7 @@ def post_content(self, title, content, tags): content_style="informative", target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], include_pricing=True, - content_length="medium" + content_length="medium", ) product_obj = ProductData.from_dict(sample_product_data) From d5a766f835bc4a04e54d499741f2322386e2233a Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Tue, 16 Sep 2025 17:53:54 +0900 Subject: [PATCH 28/57] feat: WorkflowCardDto --- .../icebang/domain/workflow/dto/WorkflowCardDto.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java index b54a29c0..6d36224a 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java @@ -2,5 +2,15 @@ import lombok.Data; +import java.math.BigInteger; +import java.time.LocalDateTime; + @Data -public class WorkflowCardDto {} +public class WorkflowCardDto { + private BigInteger id; + private String name; + private String description; + private boolean isEnabled; + private String createdBy; + private LocalDateTime createdAt; +} \ No newline at end of file From f7bb9225efab394a6246b14ba30d2356af213709 Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Tue, 16 Sep 2025 17:54:03 +0900 Subject: [PATCH 29/57] feat: WorkflowMapper --- .../domain/workflow/mapper/WorkflowMapper.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java new file mode 100644 index 00000000..6e5ef1ca --- 
/dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java @@ -0,0 +1,15 @@ +package site.icebang.domain.workflow.mapper; + +import site.icebang.common.dto.PageParams; +import site.icebang.domain.workflow.dto.WorkflowCardDto; + +import java.math.BigInteger; +import java.util.*; + +public interface WorkflowMapper { + List selectWorkflowList(PageParams pageParams); + + int selectWorkflowCount(PageParams pageParams); + + WorkflowCardDto selectWorkflowById(BigInteger id); +} From 71de16b6d9abd39decc65b81f34bc1b182bf87b9 Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Tue, 16 Sep 2025 17:54:14 +0900 Subject: [PATCH 30/57] feat: WorkflowMapper.xml --- .../mybatis/mapper/WorkflowMapper.xml | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml new file mode 100644 index 00000000..dacade96 --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml @@ -0,0 +1,42 @@ + + + + + + + + + + \ No newline at end of file From 1ab796050958815373eb3a0e86b6895f45454b83 Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Tue, 16 Sep 2025 17:54:30 +0900 Subject: [PATCH 31/57] feat: WorkflowService --- .../workflow/service/WorkflowService.java | 31 ++++++++++++------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java index 1a358924..8edaad2d 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java @@ -9,19 +9,28 @@ import 
site.icebang.common.dto.PageResult; import site.icebang.common.service.PageableService; import site.icebang.domain.workflow.dto.WorkflowCardDto; +import site.icebang.domain.workflow.mapper.WorkflowMapper; + +import java.math.BigInteger; @Service @RequiredArgsConstructor public class WorkflowService implements PageableService { - @Override - @Transactional(readOnly = true) - public PageResult getPagedResult(PageParams pageParams) { - throw new RuntimeException("Not implemented"); - // return PageResult.from( - // pageParams, - // () -> workflowMapper.selectWorkflowList(pageParams), - // () -> workflowMapper.selectWorkflowCount(pageParams) - // ); - } -} + private final WorkflowMapper workflowMapper; + + @Override + @Transactional(readOnly = true) + public PageResult getPagedResult(PageParams pageParams) { + return PageResult.from( + pageParams, + () -> workflowMapper.selectWorkflowList(pageParams), + () -> workflowMapper.selectWorkflowCount(pageParams) + ); + } + + @Transactional(readOnly = true) + public WorkflowCardDto getWorkflowById(BigInteger id) { + return workflowMapper.selectWorkflowById(id); + } +} \ No newline at end of file From f4e57034f87c95cbf03b8dfabbc71bb5069558c1 Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Tue, 16 Sep 2025 17:54:57 +0900 Subject: [PATCH 32/57] =?UTF-8?q?feat:=20Workflow=EA=B4=80=EB=A0=A8=20api?= =?UTF-8?q?=20Security=EC=97=90=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../site/icebang/global/config/security/SecurityConfig.java | 1 + .../global/config/security/endpoints/SecurityEndpoints.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java index c915867d..f9e20640 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java +++ 
b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java @@ -64,6 +64,7 @@ public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { .permitAll() .requestMatchers("/auth/login", "/auth/logout") .permitAll() + .requestMatchers("/v0/workflows/**").permitAll() .requestMatchers("/v0/auth/check-session") .authenticated() .requestMatchers(SecurityEndpoints.DATA_ADMIN.getMatchers()) diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java index 019337dc..c4c4b72f 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java @@ -26,7 +26,7 @@ public enum SecurityEndpoints { OPS("/api/scheduler/**", "/api/monitoring/**"), // 일반 사용자 엔드포인트 - USER("/user/**", "/profile/**", "/v0/auth/check-session"); + USER("/user/**", "/profile/**", "/v0/auth/check-session","/v0/workflows/**"); private final String[] patterns; From a042c80dd998a76993202e83466e03af8b3fd39d Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Tue, 16 Sep 2025 18:01:18 +0900 Subject: [PATCH 33/57] fix: spotlessApply --- .../domain/workflow/dto/WorkflowCardDto.java | 18 ++++----- .../workflow/mapper/WorkflowMapper.java | 12 +++--- .../workflow/service/WorkflowService.java | 37 +++++++++---------- .../config/security/SecurityConfig.java | 3 +- .../security/endpoints/SecurityEndpoints.java | 2 +- 5 files changed, 36 insertions(+), 36 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java index 6d36224a..a39ce0c3 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java +++ 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java @@ -1,16 +1,16 @@ package site.icebang.domain.workflow.dto; -import lombok.Data; - import java.math.BigInteger; import java.time.LocalDateTime; +import lombok.Data; + @Data public class WorkflowCardDto { - private BigInteger id; - private String name; - private String description; - private boolean isEnabled; - private String createdBy; - private LocalDateTime createdAt; -} \ No newline at end of file + private BigInteger id; + private String name; + private String description; + private boolean isEnabled; + private String createdBy; + private LocalDateTime createdAt; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java index 6e5ef1ca..00afbebc 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java @@ -1,15 +1,15 @@ package site.icebang.domain.workflow.mapper; -import site.icebang.common.dto.PageParams; -import site.icebang.domain.workflow.dto.WorkflowCardDto; - import java.math.BigInteger; import java.util.*; +import site.icebang.common.dto.PageParams; +import site.icebang.domain.workflow.dto.WorkflowCardDto; + public interface WorkflowMapper { - List selectWorkflowList(PageParams pageParams); + List selectWorkflowList(PageParams pageParams); - int selectWorkflowCount(PageParams pageParams); + int selectWorkflowCount(PageParams pageParams); - WorkflowCardDto selectWorkflowById(BigInteger id); + WorkflowCardDto selectWorkflowById(BigInteger id); } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java index 8edaad2d..052b96fa 100644 --- 
a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java @@ -1,5 +1,7 @@ package site.icebang.domain.workflow.service; +import java.math.BigInteger; + import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -11,26 +13,23 @@ import site.icebang.domain.workflow.dto.WorkflowCardDto; import site.icebang.domain.workflow.mapper.WorkflowMapper; -import java.math.BigInteger; - @Service @RequiredArgsConstructor public class WorkflowService implements PageableService { - private final WorkflowMapper workflowMapper; - - @Override - @Transactional(readOnly = true) - public PageResult getPagedResult(PageParams pageParams) { - return PageResult.from( - pageParams, - () -> workflowMapper.selectWorkflowList(pageParams), - () -> workflowMapper.selectWorkflowCount(pageParams) - ); - } - - @Transactional(readOnly = true) - public WorkflowCardDto getWorkflowById(BigInteger id) { - return workflowMapper.selectWorkflowById(id); - } -} \ No newline at end of file + private final WorkflowMapper workflowMapper; + + @Override + @Transactional(readOnly = true) + public PageResult getPagedResult(PageParams pageParams) { + return PageResult.from( + pageParams, + () -> workflowMapper.selectWorkflowList(pageParams), + () -> workflowMapper.selectWorkflowCount(pageParams)); + } + + @Transactional(readOnly = true) + public WorkflowCardDto getWorkflowById(BigInteger id) { + return workflowMapper.selectWorkflowById(id); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java index f9e20640..61d668cc 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java +++ 
b/apps/user-service/src/main/java/site/icebang/global/config/security/SecurityConfig.java @@ -64,7 +64,8 @@ public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { .permitAll() .requestMatchers("/auth/login", "/auth/logout") .permitAll() - .requestMatchers("/v0/workflows/**").permitAll() + .requestMatchers("/v0/workflows/**") + .permitAll() .requestMatchers("/v0/auth/check-session") .authenticated() .requestMatchers(SecurityEndpoints.DATA_ADMIN.getMatchers()) diff --git a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java index c4c4b72f..bdd0eb48 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java @@ -26,7 +26,7 @@ public enum SecurityEndpoints { OPS("/api/scheduler/**", "/api/monitoring/**"), // 일반 사용자 엔드포인트 - USER("/user/**", "/profile/**", "/v0/auth/check-session","/v0/workflows/**"); + USER("/user/**", "/profile/**", "/v0/auth/check-session", "/v0/workflows/**"); private final String[] patterns; From a1d776889efe6cb2e2589fa1433e45d35565969c Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Tue, 16 Sep 2025 20:20:49 +0900 Subject: [PATCH 34/57] RDB Log4j2 append (experimental) (#104) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Workflow history 데이터베이스 input * chore: e2e test logger 세팅 단 * chore: Workflow logging example controller 이름 변경 Changes to be committed: renamed: src/main/java/site/icebang/domain/TestController.java -> src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java * chore: Production workflow log (RDB) 작성 --- apps/user-service/build.gradle | 14 +- .../WorkflowLogInsertExampleController.java | 34 +++ 
.../aop/logging/WorkflowLoggingAspect.java | 3 + .../security/endpoints/SecurityEndpoints.java | 3 +- .../exception/GlobalExceptionHandler.java | 3 +- .../service/DynamicSchedulerService.java | 51 +++-- .../src/main/resources/log4j2-develop.yml | 78 ++++--- .../src/main/resources/log4j2-production.yml | 200 ++++++++++++------ .../src/main/resources/log4j2-test-e2e.yml | 63 +++--- 9 files changed, 288 insertions(+), 161 deletions(-) create mode 100644 apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java create mode 100644 apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 8aa7715a..29f095ea 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -47,21 +47,17 @@ dependencies { // batch implementation 'org.springframework.boot:spring-boot-starter-batch' - // Log4j2 - 모든 모듈을 2.22.1로 통일 implementation 'org.springframework.boot:spring-boot-starter-log4j2' - implementation 'org.apache.logging.log4j:log4j-core:2.22.1' - implementation 'org.apache.logging.log4j:log4j-api:2.22.1' - implementation 'org.apache.logging.log4j:log4j-slf4j2-impl:2.22.1' - implementation 'org.apache.logging.log4j:log4j-jul:2.22.1' implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' - implementation 'org.apache.logging.log4j:log4j-layout-template-json:2.22.1' - + implementation 'org.apache.logging.log4j:log4j-layout-template-json' + implementation 'pl.tkowalcz.tjahzi:log4j2-appender-nodep:0.9.17' implementation 'org.apache.httpcomponents:httpclient:4.5.14' implementation 'org.apache.httpcomponents:httpcore:4.4.16' - implementation 'pl.tkowalcz.tjahzi:log4j2-appender-nodep:0.9.17' // 비동기 로깅 - implementation 'com.lmax:disruptor:3.4.4' +// implementation 'com.lmax:disruptor:3.4.4' +// implementation 'org.apache.commons:commons-dbcp2' +// implementation 'org.apache.commons:commons-pool2' 
implementation "io.micrometer:micrometer-tracing-bridge-brave" implementation "io.micrometer:micrometer-tracing" diff --git a/apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java b/apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java new file mode 100644 index 00000000..c3e225b7 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/WorkflowLogInsertExampleController.java @@ -0,0 +1,34 @@ +package site.icebang.domain; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.MDC; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import lombok.extern.slf4j.Slf4j; + +import site.icebang.common.dto.ApiResponse; + +@RestController +@RequestMapping("/v0/check-execution-log-insert") +@Slf4j +public class WorkflowLogInsertExampleController { + private static final Logger workflowLogger = LoggerFactory.getLogger("WORKFLOW_HISTORY"); + + @GetMapping("") + public ApiResponse test() { + log.info("@@"); + // MDC.put("traceId", UUID.randomUUID().toString()); + MDC.put("sourceId", "o1"); + MDC.put("executionType", "WORKFLOW"); + // MDC.put("sourceId", "test-controller"); + + // 이 로그는 DB에 저장됨 + workflowLogger.info("SLF4J로 찍은 워크플로우 로그"); + + MDC.clear(); + return ApiResponse.success("hi"); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java b/apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java new file mode 100644 index 00000000..8e2d26c3 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/aop/logging/WorkflowLoggingAspect.java @@ -0,0 +1,3 @@ +package site.icebang.global.aop.logging; + +public class WorkflowLoggingAspect {} diff --git 
a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java index 019337dc..16065d8e 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/security/endpoints/SecurityEndpoints.java @@ -11,7 +11,8 @@ public enum SecurityEndpoints { "/js/**", "/images/**", "/v0/organizations/**", - "/v0/auth/register"), + "/v0/auth/register", + "/v0/check-execution-log-insert"), // 데이터 관리 관련 엔드포인트 DATA_ADMIN("/admin/**", "/api/admin/**", "/management/**", "/actuator/**"), diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java index 6923f455..4eba15ae 100644 --- a/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java +++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java @@ -24,8 +24,7 @@ public ApiResponse handleValidation(MethodArgumentNotValidException ex) @ExceptionHandler(Exception.class) @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR) public ApiResponse handleGeneric(Exception ex) { - return ApiResponse.error( - "Internal error: " + ex.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR); + return ApiResponse.error("Internal error: ", HttpStatus.INTERNAL_SERVER_ERROR); } @ExceptionHandler(NoResourceFoundException.class) diff --git a/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java b/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java index b81e30eb..b78c048e 100644 --- a/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java +++ 
b/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java @@ -1,16 +1,12 @@ package site.icebang.schedule.service; -import java.time.LocalDateTime; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledFuture; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.launch.JobLauncher; import org.springframework.context.ApplicationContext; import org.springframework.scheduling.TaskScheduler; -import org.springframework.scheduling.support.CronTrigger; import org.springframework.stereotype.Service; import lombok.RequiredArgsConstructor; @@ -30,29 +26,30 @@ public class DynamicSchedulerService { public void register(Schedule schedule) { // TODO: schedule.getWorkflowId()를 기반으로 실행할 Job의 이름을 DB에서 조회 - String jobName = "blogContentJob"; // 예시 - Job jobToRun = applicationContext.getBean(jobName, Job.class); - - Runnable runnable = - () -> { - try { - JobParametersBuilder paramsBuilder = new JobParametersBuilder(); - paramsBuilder.addString("runAt", LocalDateTime.now().toString()); - paramsBuilder.addLong("scheduleId", schedule.getScheduleId()); - jobLauncher.run(jobToRun, paramsBuilder.toJobParameters()); - } catch (Exception e) { - log.error( - "Failed to run scheduled job for scheduleId: {}", schedule.getScheduleId(), e); - } - }; - - CronTrigger trigger = new CronTrigger(schedule.getCronExpression()); - ScheduledFuture future = taskScheduler.schedule(runnable, trigger); - scheduledTasks.put(schedule.getScheduleId(), future); - log.info( - ">>>> Schedule registered: id={}, cron={}", - schedule.getScheduleId(), - schedule.getCronExpression()); + // String jobName = "blogContentJob"; // 예시 + // Job jobToRun = applicationContext.getBean(jobName, Job.class); + // + // Runnable runnable = + // () -> { + // try { + // JobParametersBuilder paramsBuilder = new JobParametersBuilder(); + // 
paramsBuilder.addString("runAt", LocalDateTime.now().toString()); + // paramsBuilder.addLong("scheduleId", schedule.getScheduleId()); + // jobLauncher.run(jobToRun, paramsBuilder.toJobParameters()); + // } catch (Exception e) { + // log.error( + // "Failed to run scheduled job for scheduleId: {}", schedule.getScheduleId(), + // e); + // } + // }; + // + // CronTrigger trigger = new CronTrigger(schedule.getCronExpression()); + // ScheduledFuture future = taskScheduler.schedule(runnable, trigger); + // scheduledTasks.put(schedule.getScheduleId(), future); + // log.info( + // ">>>> Schedule registered: id={}, cron={}", + // schedule.getScheduleId(), + // schedule.getCronExpression()); } public void remove(Long scheduleId) { diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index 41a369bf..8e68569b 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -29,7 +29,7 @@ Configuration: target: SYSTEM_OUT PatternLayout: pattern: ${console-layout-pattern} - disableAnsi: false +# disableAnsi: false # Loki Appender - 개발환경 모니터링용 Loki: @@ -60,31 +60,35 @@ Configuration: value: "${ctx:sourceId}" - name: "runId" value: "${ctx:runId}" + JDBC: + name: workflow-appender + tableName: "execution_log" + bufferSize: 0 + ignoreExceptions: false -# ExecutionDB: -# name: ExecutionDB -# class: org.apache.logging.log4j.core.appender.db.jdbc.JdbcAppender -# tableName: execution_log -# columnMappings: -# - name: log_level -# pattern: "%level" -# - name: log_message -# pattern: "%message" -# - name: trace_id -# pattern: "%X{traceId}" -# - name: execution_type -# pattern: "%X{executionType}" -# - name: source_id -# pattern: "%X{sourceId}" -# - name: run_id -# pattern: "%X{runId}" -# - name: executed_at -# pattern: "%d{yyyy-MM-dd HH:mm:ss}" -# connectionSource: -# class: org.apache.logging.log4j.core.appender.db.jdbc.DataSourceConnectionSource 
-# dataSource: "#dataSource + DriverManager: + connectionString: "jdbc:mariadb://localhost:3306/pre_process" + driverClassName: "org.mariadb.jdbc.Driver" + userName: "mariadb" + password: "qwer1234" - # 개발용 일반 로그 파일 + ColumnMapping: + - name: "execution_type" + pattern: "%X{executionType}" + - name: "source_id" + pattern: "%X{sourceId}" + - name: "log_level" + pattern: "%level" + - name: "executed_at" + pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}" # 패턴으로 시간 직접 지정 + - name: "log_message" + pattern: "%message" + - name: "trace_id" + pattern: "%X{traceId}" + - name: "reserved1" + pattern: "%X{spanId}" +# - name: "config_snapshot" +# pattern: "%X{configSnapshot}" File: - name: file-dev-appender fileName: ${dev-log} @@ -107,7 +111,7 @@ Configuration: Logger: # 애플리케이션 로그 - 개발 시 모든 레벨 + Loki 전송 - name: site.icebang - additivity: false + additivity: "false" level: DEBUG AppenderRef: - ref: console-appender @@ -115,9 +119,19 @@ Configuration: - ref: file-dev-appender - ref: file-error-appender + - name: "WORKFLOW_HISTORY" + level: DEBUG + additivity: "false" + AppenderRef: + - ref: workflow-appender + - ref: loki-appender + - ref: console-appender + - ref: file-dev-appender + - ref: file-error-appender + # Spring Framework - 개발 시 필요한 정보만 - name: org.springframework - additivity: false + additivity: "false" level: INFO AppenderRef: - ref: console-appender @@ -126,7 +140,7 @@ Configuration: # Spring Security - 인증 디버깅용 - name: org.springframework.security level: DEBUG - additivity: false + additivity: "false" AppenderRef: - ref: console-appender - ref: file-dev-appender @@ -135,7 +149,7 @@ Configuration: # 웹 요청 로그 - API 개발 시 유용 - name: org.springframework.web level: DEBUG - additivity: false + additivity: "false" AppenderRef: - ref: console-appender - ref: file-dev-appender @@ -144,7 +158,7 @@ Configuration: # 트랜잭션 로그 - DB 작업 디버깅 - name: org.springframework.transaction level: DEBUG - additivity: false + additivity: "false" AppenderRef: - ref: console-appender - ref: 
file-dev-appender @@ -152,18 +166,18 @@ Configuration: # HikariCP 로그 비활성화 - name: com.zaxxer.hikari - level: OFF + level: "OFF" # SQL 로그 - 개발 시 쿼리 확인용 (필요시 활성화) - name: org.hibernate.SQL level: DEBUG - additivity: false + additivity: "false" AppenderRef: - ref: console-appender # 파라미터 바인딩 로그 (필요시 활성화) - name: org.hibernate.type.descriptor.sql.BasicBinder level: TRACE - additivity: false + additivity: "false" AppenderRef: - ref: console-appender \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-production.yml b/apps/user-service/src/main/resources/log4j2-production.yml index 2e7d282b..455b2f22 100644 --- a/apps/user-service/src/main/resources/log4j2-production.yml +++ b/apps/user-service/src/main/resources/log4j2-production.yml @@ -1,126 +1,194 @@ Configuration: + status: INFO name: production properties: property: + - name: "app-name" + value: "user-service" - name: "log-path" value: "./logs" - name: "charset-UTF-8" value: "UTF-8" - # 통일된 콘솔 패턴 - 모든 로그에 RequestId 포함 + # 프로덕션 환경용 콘솔 패턴 - 구조화된 로그 - name: "console-layout-pattern" - value: "%highlight{[%-5level]} [%X{traceId}] %d{MM-dd HH:mm:ss} [%t] %n %msg%n%n" - # 파일용 상세 패턴 - RequestId 포함 + value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %logger{36} - %msg%n" + # 파일용 패턴 - name: "file-layout-pattern" value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n" - # 로그 파일 경로들 - - name: "info-log" - value: ${log-path}/user-service/info.log + # 프로덕션 환경용 로그 파일들 + - name: "prod-log" + value: ${log-path}/production/app.log - name: "error-log" - value: ${log-path}/user-service/error.log - - name: "auth-log" - value: ${log-path}/user-service/auth.log - - name: "json-log" - value: ${log-path}/user-service/json-info.log + value: ${log-path}/production/error.log - # [Appenders] 로그 기록방식 정의 Appenders: - # 통일된 콘솔 출력 + # 콘솔 출력 - 프로덕션에서는 최소한의 정보만 Console: name: console-appender target: SYSTEM_OUT PatternLayout: pattern: 
${console-layout-pattern} - # 롤링 파일 로그 - RollingFile: - name: rolling-file-appender - fileName: ${log-path}/rolling-file.log - filePattern: "logs/archive/rolling-file.log.%d{yyyy-MM-dd-hh-mm}_%i.gz" - PatternLayout: - charset: ${charset-UTF-8} - pattern: ${file-layout-pattern} - Policies: - SizeBasedTriggeringPolicy: - size: "200KB" - TimeBasedTriggeringPolicy: - interval: "1" - DefaultRollOverStrategy: - max: "30" - fileIndex: "max" - - # 파일 로그들 + # Loki Appender - 프로덕션 모니터링용 + Loki: + name: loki-appender + host: localhost + port: 3100 + JsonLayout: + compact: true + eventEol: true + includeStacktrace: true + KeyValuePair: + - key: "app" + value: "${app-name}" + - key: "env" + value: "production" + Label: + - name: "app" + value: "${app-name}" + - name: "env" + value: "production" + - name: "traceId" + value: "${ctx:traceId}" + - name: "spanId" + value: "${ctx:spanId}" + - name: "executionType" + value: "${ctx:executionType:-application}" + - name: "sourceId" + value: "${ctx:sourceId}" + - name: "runId" + value: "${ctx:runId}" + + JDBC: + name: workflow-appender + tableName: "execution_log" + bufferSize: 0 + ignoreExceptions: false + + DriverManager: + connectionString: "jdbc:mariadb://${env:DB_HOST}:${env:DB_PORT}/${env:DB_NAME}" + driverClassName: "org.mariadb.jdbc.Driver" + userName: "${env:DB_USER}" + password: "${env:DB_PASS}" + + ColumnMapping: + - name: "execution_type" + pattern: "%X{executionType}" + - name: "source_id" + pattern: "%X{sourceId}" + - name: "log_level" + pattern: "%level" + - name: "executed_at" + pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}" + - name: "log_message" + pattern: "%message" + - name: "trace_id" + pattern: "%X{traceId}" + - name: "reserved1" + pattern: "%X{spanId}" + File: - - name: file-info-appender - fileName: ${info-log} + - name: file-prod-appender + fileName: ${prod-log} PatternLayout: pattern: ${file-layout-pattern} + # 로그 파일 롤링 설정 + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + modulate: true + 
SizeBasedTriggeringPolicy: + size: "100 MB" + DefaultRolloverStrategy: + max: 30 + filePattern: ${log-path}/production/app.%d{yyyy-MM-dd}.%i.log + - name: file-error-appender fileName: ${error-log} PatternLayout: pattern: ${file-layout-pattern} - - name: file-auth-appender - fileName: ${auth-log} - PatternLayout: - pattern: ${file-layout-pattern} - - name: file-json-info-appender - fileName: ${json-log} - PatternLayout: - pattern: ${file-layout-pattern} + ThresholdFilter: + level: ERROR + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + modulate: true + SizeBasedTriggeringPolicy: + size: "100 MB" + DefaultRolloverStrategy: + max: 30 + filePattern: ${log-path}/production/error.%d{yyyy-MM-dd}.%i.log - # [Loggers] 로그 출력 범위를 정의 Loggers: - # [Loggers - Root] 모든 로그를 기록하는 최상위 로그를 정의 + # Root 로거 - 프로덕션에서는 WARN 레벨 Root: - level: OFF + level: WARN AppenderRef: - ref: console-appender - - ref: rolling-file-appender - # [Loggers - Loggers] 특정 패키지나 클래스에 대한 로그를 정의 Logger: - # 1. Spring Framework 로그 - - name: org.springframework + # 애플리케이션 로그 - 프로덕션에서는 INFO 레벨 + - name: site.icebang additivity: "false" - level: DEBUG + level: INFO AppenderRef: - ref: console-appender - - ref: file-info-appender + - ref: loki-appender + - ref: file-prod-appender - ref: file-error-appender - # 2. 애플리케이션 로그 - - name: site.icebang + - name: "WORKFLOW_HISTORY" + level: INFO additivity: "false" - level: TRACE AppenderRef: + - ref: workflow-appender + - ref: loki-appender - ref: console-appender - - ref: file-info-appender + - ref: file-prod-appender - ref: file-error-appender - # 3. HikariCP 로그 비활성화 - - name: com.zaxxer.hikari - level: OFF + # Spring Framework - 프로덕션에서는 WARN 레벨 + - name: org.springframework + additivity: "false" + level: WARN + AppenderRef: + - ref: console-appender + - ref: file-prod-appender - # 4. 
Spring Security 로그 - 인증/인가 추적에 중요 + # Spring Security - 프로덕션에서는 WARN 레벨 - name: org.springframework.security - level: DEBUG + level: WARN additivity: "false" AppenderRef: - ref: console-appender - - ref: file-auth-appender + - ref: file-prod-appender - # 5. 웹 요청 로그 - 요청 처리 과정 추적 + # 웹 요청 로그 - 프로덕션에서는 INFO 레벨 - name: org.springframework.web - level: DEBUG + level: INFO additivity: "false" AppenderRef: - ref: console-appender - - ref: file-info-appender + - ref: file-prod-appender + - ref: loki-appender - # 6. 트랜잭션 로그 - DB 작업 추적 + # 트랜잭션 로그 - 프로덕션에서는 WARN 레벨 - name: org.springframework.transaction - level: DEBUG + level: WARN additivity: "false" AppenderRef: - ref: console-appender - - ref: file-info-appender \ No newline at end of file + - ref: file-prod-appender + + # HikariCP 로그 비활성화 + - name: com.zaxxer.hikari + level: "OFF" + + # SQL 로그 비활성화 - 프로덕션에서는 성능상 비활성화 + - name: org.hibernate.SQL + level: "OFF" + + # 파라미터 바인딩 로그 비활성화 + - name: org.hibernate.type.descriptor.sql.BasicBinder + level: "OFF" \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-test-e2e.yml b/apps/user-service/src/main/resources/log4j2-test-e2e.yml index 557f426b..53acccf2 100644 --- a/apps/user-service/src/main/resources/log4j2-test-e2e.yml +++ b/apps/user-service/src/main/resources/log4j2-test-e2e.yml @@ -34,7 +34,7 @@ Configuration: Loki: name: loki-appender host: localhost - port: ${sys:loki.port} + port: "${loki-port}" JsonLayout: compact: true eventEol: true @@ -60,28 +60,33 @@ Configuration: - name: "runId" value: "${ctx:runId}" -# ExecutionDB: -# name: ExecutionDB -# class: org.apache.logging.log4j.core.appender.db.jdbc.JdbcAppender -# tableName: execution_log -# columnMappings: -# - name: log_level -# pattern: "%level" -# - name: log_message -# pattern: "%message" -# - name: trace_id -# pattern: "%X{traceId}" -# - name: execution_type -# pattern: "%X{executionType}" -# - name: source_id -# pattern: "%X{sourceId}" -# - name: run_id -# pattern: 
"%X{runId}" -# - name: executed_at -# pattern: "%d{yyyy-MM-dd HH:mm:ss}" -# connectionSource: -# class: org.apache.logging.log4j.core.appender.db.jdbc.DataSourceConnectionSource -# dataSource: "#dataSource + JDBC: + name: workflow-appender + tableName: "execution_log" + bufferSize: 0 + ignoreExceptions: false + + DriverManager: + connectionString: ${DriverManager.connectionString} + driverClassName: ${DriverManager.driverClassName} + userName: ${DriverManager.userName} + password: ${DriverManager.password} + + ColumnMapping: + - name: "execution_type" + pattern: "%X{executionType}" + - name: "source_id" + pattern: "%X{sourceId}" + - name: "log_level" + pattern: "%level" + - name: "executed_at" + pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}" # 패턴으로 시간 직접 지정 + - name: "log_message" + pattern: "%message" + - name: "trace_id" + pattern: "%X{traceId}" + - name: "reserved1" + pattern: "%X{spanId}" # 개발용 일반 로그 파일 File: @@ -114,6 +119,16 @@ Configuration: - ref: file-dev-appender - ref: file-error-appender + - name: "WORKFLOW_HISTORY" + level: DEBUG + additivity: "false" + AppenderRef: + - ref: workflow-appender + - ref: loki-appender + - ref: console-appender + - ref: file-dev-appender + - ref: file-error-appender + # Spring Framework - 개발 시 필요한 정보만 - name: org.springframework additivity: false @@ -151,7 +166,7 @@ Configuration: # HikariCP 로그 비활성화 - name: com.zaxxer.hikari - level: OFF + level: "OFF" # SQL 로그 - 개발 시 쿼리 확인용 (필요시 활성화) - name: org.hibernate.SQL From 1eea9dfe184a14e01c3ebb30192a9bd868a56fa8 Mon Sep 17 00:00:00 2001 From: JiHoon Date: Wed, 17 Sep 2025 00:36:44 +0900 Subject: [PATCH 35/57] =?UTF-8?q?feat=20:=20HTML=20=EC=9A=94=EC=86=8C=20?= =?UTF-8?q?=EC=9E=90=EB=8F=99=20=EC=B6=94=EC=B6=9C=20Util=20=ED=8C=8C?= =?UTF-8?q?=EC=9D=BC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Selenium으로 HTML 가져오기 -> HTML 불필요한 태그 삭제 -> LLM 프롬프트로 요소 추출 자동화 현재 네이버, 티스토리 블로그 작동 확인 완료. 
해당 파일 사용 시, 페이지 분석 및 상세한 프롬프트 작성 필수 --- .../app/test/test_extraction_html.py | 487 ++++++++++++++++++ .../app/utils/llm_extractor.py | 252 +++++++++ .../app/utils/preprocess_html.py | 166 ++++++ 3 files changed, 905 insertions(+) create mode 100644 apps/pre-processing-service/app/test/test_extraction_html.py create mode 100644 apps/pre-processing-service/app/utils/llm_extractor.py create mode 100644 apps/pre-processing-service/app/utils/preprocess_html.py diff --git a/apps/pre-processing-service/app/test/test_extraction_html.py b/apps/pre-processing-service/app/test/test_extraction_html.py new file mode 100644 index 00000000..0c2ce196 --- /dev/null +++ b/apps/pre-processing-service/app/test/test_extraction_html.py @@ -0,0 +1,487 @@ +# if __name__ == "__main__": +# from app.utils.crawling_util import CrawlingUtil +# from app.utils.llm_extractor import LLMExtractor +# from selenium.webdriver.common.by import By +# from selenium.webdriver.support import expected_conditions as EC +# from selenium.common.exceptions import TimeoutException +# from selenium.webdriver.common.keys import Keys +# from selenium.webdriver.common.action_chains import ActionChains +# import pyperclip +# import time +# import json +# +# crawling_util = CrawlingUtil() +# llm_extractor = LLMExtractor() +# +# start_time = time.time() +# driver = crawling_util.get_driver() +# wait_driver = crawling_util.get_wait() +# +# # ========== 로그인 부분 ========== +# driver.get("https://nid.naver.com/nidlogin.login") +# time.sleep(5) +# html = driver.page_source +# +# print(f"원본 HTML 길이: {len(html)}") +# html_list = preprocess_html(html) +# +# result_html = 0 +# +# for html in html_list: +# result_html += len(html) +# +# print(f"전처리된 HTML 총 길이: {result_html}, 분할된 청크 수: {len(html_list)}") +# +# result = [] +# +# for idx, html in enumerate(html_list): +# print(f"전처리된 HTML 길이: {len(html)}, List {idx}번 ") +# prompt = llm_extractor.extraction_prompt("아이디, 비밀번호를 입력할 수 있는 요소, 로그인 버튼을 클릭할 수 있는 요소", html) +# +# response = 
llm_extractor.client.chat.completions.create( +# model=llm_extractor.model, +# messages=[{"role": "system", "content": prompt}], +# temperature=0, +# response_format={"type": "json_object"} +# ) +# +# result_json = response.choices[0].message.content +# +# result.append(result_json) +# +# parse_result = [json.loads(item) for item in result] +# print(json.dumps(parse_result, indent=4, ensure_ascii=False)) +# +# # 로그인 +# naver_id = "all2641" +# naver_password = "kdyn2641*" +# +# # 모든 결과에서 요소들을 수집 (개선된 방식) +# all_elements = {} +# +# for item in parse_result: +# if not item.get("found"): +# print("요소를 찾지 못했습니다.") +# continue +# +# elements = item.get("elements", []) +# for element in elements: +# for key, value in element.items(): +# # ID 관련 요소 +# if "id" in key.lower(): +# if "css_selector" in key: +# all_elements["id_css"] = value +# elif "xpath" in key: +# all_elements["id_xpath"] = value +# +# # Password 관련 요소 +# elif "password" in key.lower() or "pw" in key.lower(): +# if "css_selector" in key: +# all_elements["pw_css"] = value +# elif "xpath" in key: +# all_elements["pw_xpath"] = value +# +# # Login 관련 요소 +# elif "login" in key.lower(): +# if "css_selector" in key: +# all_elements["login_css"] = value +# elif "xpath" in key: +# all_elements["login_xpath"] = value +# +# print(f"수집된 요소들: {all_elements}") +# +# # 아이디 입력 +# id_input = None +# if all_elements.get("id_css"): +# try: +# id_input = wait_driver.until( +# EC.presence_of_element_located((By.CSS_SELECTOR, all_elements["id_css"])) +# ) +# print(f"아이디 요소 발견 (CSS): {all_elements['id_css']}") +# time.sleep(2) +# except TimeoutException: +# print(f"아이디 요소를 찾지 못했습니다 (CSS): {all_elements['id_css']}") +# +# if not id_input and all_elements.get("id_xpath"): +# try: +# id_input = wait_driver.until( +# EC.presence_of_element_located((By.XPATH, all_elements["id_xpath"])) +# ) +# print(f"아이디 요소 발견 (XPath): {all_elements['id_xpath']}") +# time.sleep(2) +# except TimeoutException: +# print(f"아이디 요소를 찾지 못했습니다 (XPath): 
{all_elements['id_xpath']}") +# +# if id_input: +# id_input.click() +# time.sleep(1) +# pyperclip.copy(naver_id) +# time.sleep(1) +# id_input.send_keys(Keys.COMMAND, "v") +# time.sleep(1) +# +# # 비밀번호 입력 +# password_input = None +# if all_elements.get("pw_css"): +# try: +# password_input = wait_driver.until( +# EC.presence_of_element_located((By.CSS_SELECTOR, all_elements["pw_css"])) +# ) +# print(f"비밀번호 요소 발견 (CSS): {all_elements['pw_css']}") +# time.sleep(2) +# except TimeoutException: +# print(f"비밀번호 요소를 찾지 못했습니다 (CSS): {all_elements['pw_css']}") +# +# if not password_input and all_elements.get("pw_xpath"): +# try: +# password_input = wait_driver.until( +# EC.presence_of_element_located((By.XPATH, all_elements["pw_xpath"])) +# ) +# print(f"비밀번호 요소 발견 (XPath): {all_elements['pw_xpath']}") +# time.sleep(2) +# except TimeoutException: +# print(f"비밀번호 요소를 찾지 못했습니다 (XPath): {all_elements['pw_xpath']}") +# +# if password_input: +# password_input.click() +# time.sleep(1) +# pyperclip.copy(naver_password) +# time.sleep(1) +# password_input.send_keys(Keys.COMMAND, "v") +# time.sleep(1) +# +# # 로그인 버튼 클릭 +# login_button = None +# if all_elements.get("login_css"): +# try: +# login_selector = all_elements["login_css"].replace('\\', '') +# login_button = wait_driver.until( +# EC.element_to_be_clickable((By.CSS_SELECTOR, login_selector)) +# ) +# print(f"로그인 버튼 요소 발견 (CSS): {login_selector}") +# except TimeoutException: +# print(f"로그인 버튼 요소를 찾지 못했습니다 (CSS): {all_elements['login_css']}") +# +# if not login_button and all_elements.get("login_xpath"): +# try: +# login_button = wait_driver.until( +# EC.element_to_be_clickable((By.XPATH, all_elements["login_xpath"])) +# ) +# print(f"로그인 버튼 요소 발견 (XPath): {all_elements['login_xpath']}") +# except TimeoutException: +# print(f"로그인 버튼 요소를 찾지 못했습니다 (XPath): {all_elements['login_xpath']}") +# +# if login_button: +# login_button.click() +# print("로그인 버튼 클릭 완료") +# +# # 로그인 완료 대기 +# time.sleep(5) +# print("로그인 완료, 블로그 포스팅 시작...") +# +# # 
========== 블로그 포스팅 부분 (도움말 닫기 버튼 추가) ========== +# try: +# # 네이버 블로그 글쓰기 페이지로 이동 +# post_content_url = f"https://blog.naver.com/PostWriteForm.naver?blogId={naver_id}&Redirect=Write&redirect=Write&widgetTypeCall=true&noTrackingCode=true&directAccess=false" +# driver.get(post_content_url) +# print("블로그 글쓰기 페이지로 이동 완료. 5초 대기...") +# time.sleep(10) +# +# blog_html = driver.page_source +# print(f"HTML 길이: {len(blog_html)}") +# blog_html_list = preprocess_html(blog_html) +# blog_result_html = sum(len(html) for html in blog_html_list) +# print(f"전처리된 HTML 총 길이: {blog_result_html}, 분할된 청크 수: {len(blog_html_list)}") +# +# # 테스트용 제목, 내용, 태그 +# test_title = "LLM 기반 자동화 포스팅" +# test_content = "이 포스트는 LLM이 iframe 내부의 HTML을 분석하여 자동으로 작성한 글입니다." +# test_tags = ["LLM", "자동화", "네이버블로그"] +# +# # 3. LLM을 사용해 iframe 내부의 블로그 요소들 추출 +# blog_result = [] +# +# for idx, html in enumerate(blog_html_list): +# print(f"HTML 청크 {idx + 1}/{len(blog_html_list)} 분석 중...") +# prompt = llm_extractor.naver_post_extraction_prompt(html) +# response = llm_extractor.client.chat.completions.create( +# model=llm_extractor.model, +# messages=[{"role": "system", "content": prompt}], +# temperature=0, +# response_format={"type": "json_object"} +# ) +# blog_result.append(response.choices[0].message.content) +# +# blog_parse_result = [json.loads(item) for item in blog_result] +# print("\n>> 블로그 요소 추출 결과:") +# print(json.dumps(blog_parse_result, indent=4, ensure_ascii=False)) +# +# # 4. 
추출된 요소 정보 취합 +# blog_elements = {} +# for item in blog_parse_result: +# if not item.get("found"): continue +# for element in item.get("elements", []): +# for key, value in element.items(): +# if "title" in key.lower(): +# if "css_selector" in key: +# blog_elements["title_css"] = value +# elif "xpath" in key: +# blog_elements["title_xpath"] = value +# elif "content" in key.lower() or "body" in key.lower(): +# if "css_selector" in key: +# blog_elements["content_css"] = value +# elif "xpath" in key: +# blog_elements["content_xpath"] = value +# elif "help_close" in key.lower(): +# if "css_selector" in key: +# blog_elements["help_close_css"] = value +# elif "xpath" in key: +# blog_elements["help_close_xpath"] = value +# elif "first_publish" in key.lower(): +# if "css_selector" in key: +# blog_elements["first_publish_css"] = value +# elif "xpath" in key: +# blog_elements["first_publish_xpath"] = value +# elif "tag_input" in key.lower(): +# if "css_selector" in key: +# blog_elements["tag_input_css"] = value +# elif "xpath" in key: +# blog_elements["tag_input_xpath"] = value +# elif "final_publish" in key.lower(): +# if "css_selector" in key: +# blog_elements["final_publish_css"] = value +# elif "xpath" in key: +# blog_elements["final_publish_xpath"] = value +# +# print(f"\n>> 수집된 블로그 요소들: {blog_elements}") +# +# # 5. 
도움말 닫기 버튼 클릭 (발행 버튼이 가려지지 않도록) +# help_close_button = None +# help_close_css = blog_elements.get("help_close_css") +# if help_close_css: +# try: +# help_close_button = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, help_close_css))) +# print(f"✅ 도움말 닫기 버튼 발견 (CSS): {help_close_css}") +# except TimeoutException: +# print(f"⚠️ 도움말 닫기 버튼을 찾지 못했습니다 (CSS): {help_close_css}") +# +# if not help_close_button: +# help_close_xpath = blog_elements.get("help_close_xpath") +# if help_close_xpath: +# try: +# help_close_button = wait_driver.until(EC.element_to_be_clickable((By.XPATH, help_close_xpath))) +# print(f"✅ 도움말 닫기 버튼 발견 (XPath): {help_close_xpath}") +# except TimeoutException: +# print(f"⚠️ 도움말 닫기 버튼을 찾지 못했습니다 (XPath): {help_close_xpath}") +# +# if help_close_button: +# try: +# help_close_button.click() +# print("✅ 도움말 닫기 버튼 클릭 완료") +# time.sleep(1) # 닫히는 시간 대기 +# except Exception as e: +# print(f"⚠️ 도움말 닫기 버튼 클릭 실패: {str(e)}") +# # JavaScript로 강제 클릭 시도 +# try: +# driver.execute_script("arguments[0].click();", help_close_button) +# print("✅ 도움말 닫기 버튼 JavaScript 클릭 완료") +# time.sleep(1) +# except Exception as js_e: +# print(f"❌ 도움말 닫기 버튼 JavaScript 클릭도 실패: {str(js_e)}") +# else: +# print("⚠️ 도움말 닫기 버튼을 찾지 못했습니다. se-utils 요소 직접 제거를 시도합니다.") +# # 직접 se-utils 요소 제거 +# try: +# driver.execute_script(""" +# var element = document.querySelector('.se-utils'); +# if (element) { +# element.style.display = 'none'; +# console.log('se-utils 요소를 숨겼습니다.'); +# } +# """) +# print("✅ se-utils 요소를 직접 숨김 처리했습니다.") +# except Exception as e: +# print(f"⚠️ se-utils 요소 숨김 처리 실패: {str(e)}") +# +# # 6. 
제목 및 본문 입력 (CSS, XPath 순차 시도) +# # 제목 입력 +# title_input = None +# title_css = blog_elements.get("title_css") +# if title_css: +# try: +# title_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, title_css))) +# print(f"✅ 제목 요소 발견 (CSS): {title_css}") +# except TimeoutException: +# print(f"⚠️ 제목 요소를 찾지 못했습니다 (CSS): {title_css}") +# +# if not title_input: +# title_xpath = blog_elements.get("title_xpath") +# if title_xpath: +# try: +# title_input = wait_driver.until(EC.element_to_be_clickable((By.XPATH, title_xpath))) +# print(f"✅ 제목 요소 발견 (XPath): {title_xpath}") +# except TimeoutException: +# print(f"⚠️ 제목 요소를 찾지 못했습니다 (XPath): {title_xpath}") +# +# if title_input: +# ActionChains(driver).move_to_element(title_input).click().send_keys(test_title).perform() +# print("✅ 제목 입력 완료") +# else: +# print("❌ 제목 입력 요소를 최종적으로 찾지 못했습니다.") +# +# # 본문 입력 +# content_input = None +# content_css = blog_elements.get("content_css") +# if content_css: +# try: +# content_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, content_css))) +# print(f"✅ 본문 요소 발견 (CSS): {content_css}") +# except TimeoutException: +# print(f"⚠️ 본문 요소를 찾지 못했습니다 (CSS): {content_css}") +# +# if not content_input: +# content_xpath = blog_elements.get("content_xpath") +# if content_xpath: +# try: +# content_input = wait_driver.until(EC.element_to_be_clickable((By.XPATH, content_xpath))) +# print(f"✅ 본문 요소 발견 (XPath): {content_xpath}") +# except TimeoutException: +# print(f"⚠️ 본문 요소를 찾지 못했습니다 (XPath): {content_xpath}") +# +# if content_input: +# ActionChains(driver).move_to_element(content_input).click().send_keys(test_content).perform() +# print("✅ 본문 입력 완료") +# else: +# print("❌ 본문 입력 요소를 최종적으로 찾지 못했습니다.") +# +# # 7. 
발행 버튼 클릭 (LLM이 찾은 선택자 사용) +# first_publish_button = None +# first_publish_css = blog_elements.get("first_publish_css") +# if first_publish_css: +# try: +# first_publish_button = wait_driver.until( +# EC.element_to_be_clickable((By.CSS_SELECTOR, first_publish_css))) +# print(f"✅ 첫 번째 발행 버튼 발견 (CSS): {first_publish_css}") +# except TimeoutException: +# print(f"⚠️ 첫 번째 발행 버튼을 찾지 못했습니다 (CSS): {first_publish_css}") +# +# if not first_publish_button: +# first_publish_xpath = blog_elements.get("first_publish_xpath") +# if first_publish_xpath: +# try: +# first_publish_button = wait_driver.until( +# EC.element_to_be_clickable((By.XPATH, first_publish_xpath))) +# print(f"✅ 첫 번째 발행 버튼 발견 (XPath): {first_publish_xpath}") +# except TimeoutException: +# print(f"⚠️ 첫 번째 발행 버튼을 찾지 못했습니다 (XPath): {first_publish_xpath}") +# +# if first_publish_button: +# try: +# # 일반 클릭 시도 +# first_publish_button.click() +# print("✅ 첫 번째 발행 버튼 클릭 완료. 팝업창을 기다립니다...") +# except Exception as click_error: +# print(f"⚠️ 일반 클릭 실패, JavaScript 클릭 시도: {str(click_error)}") +# driver.execute_script("arguments[0].click();", first_publish_button) +# print("✅ 첫 번째 발행 버튼 JavaScript 클릭 완료. 팝업창을 기다립니다...") +# +# time.sleep(3) +# else: +# print("❌ 첫 번째 발행 버튼을 최종적으로 찾지 못했습니다. 하드코딩 선택자를 시도합니다.") +# # 폴백: 하드코딩 선택자 사용 +# try: +# publish_button = wait_driver.until( +# EC.element_to_be_clickable((By.XPATH, "//button[.//span[normalize-space()='발행']]"))) +# +# try: +# publish_button.click() +# print("✅ 발행 버튼 하드코딩 클릭 완료. 팝업창을 기다립니다...") +# except Exception as click_error: +# driver.execute_script("arguments[0].click();", publish_button) +# print("✅ 발행 버튼 하드코딩 JavaScript 클릭 완료. 팝업창을 기다립니다...") +# +# time.sleep(3) +# except TimeoutException: +# print("❌ 하드코딩 발행 버튼도 찾지 못했습니다.") +# +# # 8. 
태그 입력 및 최종 발행 (LLM이 찾은 선택자 사용) +# try: +# # 태그 입력 필드 찾기 +# tag_input = None +# tag_input_css = blog_elements.get("tag_input_css") +# if tag_input_css: +# try: +# tag_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, tag_input_css))) +# print(f"✅ 태그 입력 필드 발견 (CSS): {tag_input_css}") +# except TimeoutException: +# print(f"⚠️ 태그 입력 필드를 찾지 못했습니다 (CSS): {tag_input_css}") +# +# if not tag_input: +# tag_input_xpath = blog_elements.get("tag_input_xpath") +# if tag_input_xpath: +# try: +# tag_input = wait_driver.until(EC.element_to_be_clickable((By.XPATH, tag_input_xpath))) +# print(f"✅ 태그 입력 필드 발견 (XPath): {tag_input_xpath}") +# except TimeoutException: +# print(f"⚠️ 태그 입력 필드를 찾지 못했습니다 (XPath): {tag_input_xpath}") +# +# if not tag_input: +# # 폴백: 하드코딩 선택자 사용 +# tag_input = wait_driver.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "input[placeholder*='태그']"))) +# print("✅ 태그 입력 필드 하드코딩 선택자로 발견") +# +# # 태그 입력 +# for tag in test_tags: +# tag_input.send_keys(tag) +# tag_input.send_keys(Keys.ENTER) +# time.sleep(0.5) +# print("✅ 태그 입력 완료") +# +# # 최종 발행 버튼 찾기 +# final_publish_button = None +# final_publish_css = blog_elements.get("final_publish_css") +# if final_publish_css: +# try: +# final_publish_button = wait_driver.until( +# EC.element_to_be_clickable((By.CSS_SELECTOR, final_publish_css))) +# print(f"✅ 최종 발행 버튼 발견 (CSS): {final_publish_css}") +# except TimeoutException: +# print(f"⚠️ 최종 발행 버튼을 찾지 못했습니다 (CSS): {final_publish_css}") +# +# if not final_publish_button: +# final_publish_xpath = blog_elements.get("final_publish_xpath") +# if final_publish_xpath: +# try: +# final_publish_button = wait_driver.until( +# EC.element_to_be_clickable((By.XPATH, final_publish_xpath))) +# print(f"✅ 최종 발행 버튼 발견 (XPath): {final_publish_xpath}") +# except TimeoutException: +# print(f"⚠️ 최종 발행 버튼을 찾지 못했습니다 (XPath): {final_publish_xpath}") +# +# if not final_publish_button: +# # 폴백: 하드코딩 선택자 사용 +# final_publish_button = wait_driver.until(EC.element_to_be_clickable( 
+# (By.XPATH, "//div[contains(@class,'popup')]//button[.//span[normalize-space()='발행']]"))) +# print("✅ 최종 발행 버튼 하드코딩 선택자로 발견") +# +# # 최종 발행 버튼 클릭 +# final_publish_button.click() +# print("✅ 최종 발행 버튼 클릭 완료!") +# +# wait_driver.until(EC.url_contains("PostView.naver"), timeout=10) +# print("\n🎉 블로그 포스팅 발행 최종 완료! 🎉") +# except TimeoutException: +# print("❌ 발행 팝업 처리 중 오류가 발생했습니다.") +# raise +# +# except Exception as e: +# print(f"블로그 포스팅 중 오류 발생: {str(e)}") +# +# # ... (이후 전체 소요 시간 측정 및 드라이버 종료 코드) ... +# +# end_time = time.time() +# print(f"전체 소요 시간: {end_time - start_time} seconds") +# +# # 대기 후 드라이버 종료 +# time.sleep(5) +# driver.quit() \ No newline at end of file diff --git a/apps/pre-processing-service/app/utils/llm_extractor.py b/apps/pre-processing-service/app/utils/llm_extractor.py new file mode 100644 index 00000000..96903018 --- /dev/null +++ b/apps/pre-processing-service/app/utils/llm_extractor.py @@ -0,0 +1,252 @@ +import os +from openai import OpenAI +from dotenv import load_dotenv + +load_dotenv() + +class LLMExtractor: + + def __init__(self, model = "gpt-4o"): + """ + LLMExtractor 초기화 + :param model: 사용할 LLM 모델 이름 + """ + + self.client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + self.model = model + + def login_extraction_prompt(self, target_description: str, html: str): + """ + 네이버, 티스토리 통합 로그인 프롬프트 + :param html: 분석할 HTML + :param target_description: 추출 대상 설명 + :return: 프롬프트 문자열 + """ + + return f""" + # 지시 (Instructions): + 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다. + 2. 당신의 임무는 사용자의 목표와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다. + + # 규칙 (Rules): + 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다. + 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요. + 3. name에는 요소의 이름을 나타내도록 지정하세요. 예: id, password, login_button, title, body 등 + 4. 
반한되는 형식 : + {{ + "found": true/false, + "elements": [ + {{ + "name_css_selector": "CSS 선택자 문자열", + "name_xpath": "XPath 문자열" + }}, + ] + }} + + # 수행 (Execution): + 사용자의 요구 사항 : {target_description} + HTML 문서 : {html} + + """ + + def naver_post_extraction_prompt(self, html: str): + """ + 네이버 블로그 포스트 프롬프트 + :param html: 분석할 HTML + :return: 프롬프트 문자열 + """ + + return f""" + # 지시 (Instructions): + 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다. + 2. 당신의 임무는 목표(Goal)와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다. + + # 규칙 (Rules): + 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다. + 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요. + + # 목표 (Goal): + + ## 제목 입력 영역 찾기: + "제목"이 포함된 요소 찾기 + - HTML에서 "제목"이라는 한글 텍스트를 포함한 모든 요소 검색 + - 이 요소와 같은 부모나 형제 관계에 있는 요소 찾기 + + ## 본문 입력 영역 찾기: + "본문"이 포함된 요소 찾기: + - HTML에서 "본문"이라는 한글 텍스트를 포함한 모든 요소 검색 + - 이 요소와 같은 부모나 형제 관계에 있는 요소 찾기 + + # 도움말 닫기 버튼 찾기: + "도움말"이 포함된 요소 찾기: + - "도움말"이라는 한글 텍스트를 포함한 모든 요소 검색 + - 이 요소와 같은 부모나 형제 관계에 있는 "닫기" 버튼 찾기 + + # 첫 번째 발행 버튼(팝업 열기용) 찾기: + "발행"이 포함된 버튼 요소 찾기: + - HTML에서 "발행"이라는 한글 텍스트를 포함한 모든 버튼 + - 이 버튼이 팝업을 여는 역할을 하는지 확인 + + # 태그 입력 필드 찾기: + "tag"가 포함된 요소 찾기: + - HTML에서 "tag"라는 단어가 포함된 모든 요소 검색 + - id나 placeholder에 "tag" or "태그" 관련 내용이 있는 것 + + # 최종 발행 버튼 찾기: + popup 내부의 발행 버튼 찾기: + - popup div 내부에 있는 "발행" 버튼 + - confirm_btn 클래스가 포함된 버튼 + + # 반환 형식: + {{ + "found": true/false, + "elements": [ + {{ + "title_css_selector": "제목 입력을 위한 요소의 CSS 선택자", + "title_xpath": "제목 입력을 위한 요소의 XPath" + }}, + {{ + "content_css_selector": "본문 입력을 위한 요소의 CSS 선택자", + "content_xpath": "본문 입력을 위한 요소의 XPath" + }}, + {{ + "help_close_css_selector": "도움말 닫기 버튼의 CSS 선택자", + "help_close_xpath": "도움말 닫기 버튼의 XPath" + }}, + {{ + "first_publish_css_selector": "첫 번째 발행 버튼(팝업 열기용)의 CSS 선택자", + "popup_publish_xpath": "첫 번째 발행 버튼(팝업 열기용)의 XPath" + }}, + {{ + "tag_input_css_selector": "태그 입력 필드의 CSS 선택자", + "tag_input_xpath": "태그 입력 필드의 XPath" + }}, + {{ + "final_publish_css_selector": "팝업 내의 발행 버튼의 CSS 
선택자", + "final_publish_xpath": "팝업 내의 발행 버튼의 XPath" + }} + ] + }} + + # 분석할 HTML: + {html} + """ + + def tistory_post_extraction_prompt(self, html: str): + """ + 티스토리 기본 입력 요소들 (제목, 내용, 태그, 완료버튼) 추출 프롬프트 + :param html: 분석할 HTML + :return: 프롬프트 문자열 + """ + return f""" + # 지시 (Instructions): + 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다. + 2. 당신의 임무는 목표(Goal)와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다. + + # 규칙 (Rules): + 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다. + 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요. + + # 목표 (Goal): + + ## 제목 입력 영역 찾기: + "제목"이 포함된 요소 찾기 + - HTML에서 "제목"이라는 한글 텍스트를 포함한 모든 요소 검색 + - 이 요소와 같은 부모나 형제 관계에 있는 요소 찾기 + + ## 글 내용 입력 영역 찾기: + "글 내용 입력"이 포함된 요소 찾기: + - iframe 내부의 요소 우선 검색 + - "글 내용 입력"이라는 한글 텍스트를 포함한 요소 검색 + - contenteditable="true" 속성을 가진 요소 우선 검색 + + # "tag" or "태그" 입력 필드 찾기: + "tag" or "태그"가 포함된 요소 찾기: + - HTML에서 "tag" or "태그"라는 텍스트를 포함한 모든 요소 검색 + - id나 placeholder에 "tag" or "태그" 관련 내용이 있는 것 + + # 완료 버튼 찾기: + "완료"가 포함된 버튼 요소 찾기: + - HTML에서 정확히 "완료"라는 한글 텍스트를 포함한 모든 버튼 + - 이 버튼이 글 작성을 완료하는 역할을 하는지 확인 + + # 반환 형식: + {{ + "found": true/false, + "elements": [ + {{ + "title_css_selector": "제목 입력을 위한 요소의 CSS 선택자 또는 null", + "title_xpath": "제목 입력을 위한 요소의 XPath 또는 null" + }}, + {{ + "content_css_selector": "글 내용 입력을 위한 요소의 CSS 선택자 또는 null", + "content_xpath": "글 내용 입력을 위한 요소의 XPath 또는 null" + }}, + {{ + "tag_input_css_selector": "태그 입력 필드의 CSS 선택자 또는 null", + "tag_input_xpath": "태그 입력 필드의 XPath 또는 null" + }}, + {{ + "complete_css_selector": "완료 버튼의 CSS 선택자 또는 null", + "complete_xpath": "완료 버튼의 XPath 또는 null" + }} + ] + }} + + # 분석할 HTML: + {html} + """ + + def tistory_publish_extraction_prompt(self, html: str): + """ + 티스토리 발행 관련 요소들 (공개 라디오, 발행 버튼) 추출 프롬프트 + 완료 버튼 클릭 후 동적으로 생성되는 요소들을 찾기 위한 프롬프트 + :param html: 분석할 HTML (완료 버튼 클릭 후 업데이트된 HTML) + :return: 프롬프트 문자열 + """ + return f""" + # 지시 (Instructions): + 1. 당신은 HTML에서 웹 자동화에 필요한 정확한 요소를 찾는 전문가입니다. + 2. 
당신의 임무는 목표(Goal)와 가장 일치하는 요소에 대한 CSS Selector와 XPath를 정확하게 찾아내어 지정된 JSON 형식으로 반환하는 것입니다. + + # 규칙 (Rules): + 1. 만약 요청한 요소가 HTML 문서에 존재하지 않는다면, 반드시 {{"found": false}} 만 반환해야 합니다. + 2. 억지로 추측하거나 존재하지 않는 요소에 대한 정보를 생성하지 마세요. + 3. CSS 선택자에서 Selenium이 지원하지 않는 문법을 사용하지 마세요: + - :contains() 선택자 금지 (jQuery 전용) + - :visible, :hidden 같은 jQuery 전용 선택자 금지 + - 표준 CSS 선택자만 사용 (id, class, attribute, tag 등) + + # 목표 (Goal): + + # 공개 radio 버튼 찾기: + "공개"가 포함된 radio 요소 찾기: + - input type="radio" 요소 우선 검색 + - HTML에서 "공개"라는 한글 텍스트를 포함한 모든 radio 버튼 + - 글의 공개/비공개 설정을 위한 라디오 버튼 + + # 발행 버튼 찾기: + "발행"이 포함된 버튼 요소 찾기: + - HTML에서 "발행"이라는 한글 텍스트를 포함한 모든 버튼 + - "게시", "Publish" 등의 유사한 텍스트도 포함 + - publish-btn, btn-publish 등의 id나 class를 가진 버튼 우선 검색 + - 이 버튼이 최종적으로 글을 발행하는 역할을 하는지 확인 + + # 반환 형식: + {{ + "found": true/false, + "elements": [ + {{ + "public_radio_css_selector": "공개 radio의 CSS 선택자 또는 null", + "public_radio_xpath": "공개 radio의 XPath 또는 null" + }}, + {{ + "publish_css_selector": "발행 버튼의 CSS 선택자 또는 null", + "publish_xpath": "발행 버튼의 XPath 또는 null" + }} + ] + }} + + # 분석할 HTML: + {html} + """ \ No newline at end of file diff --git a/apps/pre-processing-service/app/utils/preprocess_html.py b/apps/pre-processing-service/app/utils/preprocess_html.py new file mode 100644 index 00000000..8be0ae26 --- /dev/null +++ b/apps/pre-processing-service/app/utils/preprocess_html.py @@ -0,0 +1,166 @@ +from bs4 import BeautifulSoup, Comment +from selenium.webdriver.common.by import By +from selenium.webdriver.support import expected_conditions as EC +import re + +def preprocess_html(html_content): + """ + 보수적인 HTML 전처리 - 블로그 에디터 요소들을 더 잘 보존 + """ + soup = BeautifulSoup(html_content, 'html.parser') + + # 완전히 불필요한 태그들만 제거 (더 보수적) + unnecessary_tags = [ + 'script', # JavaScript 코드 + 'style', # CSS 스타일 + 'noscript', # JavaScript 비활성화 시 내용 + 'meta', # 메타데이터 + 'link', # 외부 리소스 링크 (중요한 것 제외) + 'head', # head 전체 + 'title', # 페이지 제목 + 'base', # base URL + ] + + for tag_name in unnecessary_tags: + for tag in 
soup.find_all(tag_name): + # link 태그 중 중요한 것은 보존 + if tag_name == 'link' and tag.get('rel') in ['stylesheet', 'icon']: + continue + tag.decompose() + + # HTML 주석 제거 + comments = soup.find_all(string=lambda text: isinstance(text, Comment)) + for comment in comments: + comment.extract() + + # display:none만 제거하고 다른 숨김 요소는 보존 + hidden_elements = soup.find_all(attrs={ + 'style': re.compile(r'display\s*:\s*none', re.I) + }) + for element in hidden_elements: + element.decompose() + + # 중요한 속성들을 더 포괄적으로 보존 + important_attributes = { + 'id', 'class', 'name', 'type', 'value', 'href', 'src', 'alt', 'title', + 'placeholder', 'role', 'aria-label', 'aria-describedby', 'aria-expanded', + 'onclick', 'onchange', 'onfocus', 'onblur', + 'disabled', 'readonly', 'required', 'checked', 'selected', 'hidden', + 'tabindex', 'contenteditable', # 이게 중요! + 'spellcheck', 'autocomplete', 'maxlength', 'minlength', + 'for', 'form', 'method', 'action', 'target' + } + + # 속성 제거를 더 보수적으로 수행 + for tag in soup.find_all(True): + attrs_to_remove = [] + for attr_name in tag.attrs.keys(): + # data-* 속성은 모두 보존 + if attr_name.startswith('data-'): + continue + # aria-* 속성도 모두 보존 + if attr_name.startswith('aria-'): + continue + # on* 이벤트 속성들도 보존 + if attr_name.startswith('on'): + continue + # 중요 속성이 아니면 제거 + if attr_name not in important_attributes: + attrs_to_remove.append(attr_name) + + for attr_name in attrs_to_remove: + del tag.attrs[attr_name] + + # 빈 태그 제거를 더 신중하게 수행 + interactive_tags = { + 'input', 'button', 'select', 'textarea', 'a', 'img', 'br', 'hr', + 'div', 'span' # div, span도 에디터 요소일 수 있으므로 보존 + } + + def remove_empty_tags_conservative(): + removed_any = True + iteration = 0 + while removed_any and iteration < 3: # 최대 3번만 반복 + removed_any = False + iteration += 1 + + for tag in soup.find_all(): + # 상호작용 가능한 태그는 건드리지 않음 + if tag.name in interactive_tags: + continue + + # contenteditable 속성이 있으면 보존 + if tag.get('contenteditable'): + continue + + # data-* 속성이 있으면 보존 (React 컴포넌트일 가능성) + if 
any(attr.startswith('data-') for attr in tag.attrs.keys()): + continue + + # 텍스트도 없고 자식 요소도 없으면 제거 + if not tag.get_text(strip=True) and not tag.find_all(): + tag.decompose() + removed_any = True + + remove_empty_tags_conservative() + + # 연속된 공백 정리 (더 보수적) + for text_node in soup.find_all(string=True): + if text_node.parent.name not in ['script', 'style']: + cleaned_text = re.sub(r'\s+', ' ', str(text_node)) + if cleaned_text != str(text_node): + text_node.replace_with(cleaned_text) + + html_list = _chunking_html(str(soup)) + return html_list + +def _chunking_html(html_content, chunk_size=50000): + """ + HTML을 지정된 크기로 분할하는 메서드 + :param html_content: 원본 HTML 문자열 + :param chunk_size: 각 청크의 최대 크기 (문자 수) + :return: HTML 청크 리스트 + """ + chunks = [] + for i in range(0, len(html_content), chunk_size): + chunks.append(html_content[i:i + chunk_size]) + return chunks + +def wait_for_tistory_editor_complete(driver, timeout=30): + """ + 티스토리 TinyMCE 에디터가 완전히 로드될 때까지 대기 + """ + from selenium.webdriver.support.ui import WebDriverWait + wait = WebDriverWait(driver, timeout) + + print("🎯 티스토리 에디터 로딩 대기 중...") + + # 1단계: 페이지 기본 로딩 + wait.until(lambda d: d.execute_script("return document.readyState") == "complete") + + # 2단계: TinyMCE 라이브러리 로딩 + wait.until(lambda d: d.execute_script("return typeof tinymce !== 'undefined'")) + + # 3단계: 에디터 인스턴스 초기화 + wait.until(lambda d: d.execute_script(""" + return tinymce.get('editor-tistory') && + tinymce.get('editor-tistory').initialized + """)) + + # 4단계: iframe 준비 + wait.until(EC.presence_of_element_located((By.ID, "editor-tistory_ifr"))) + + # 5단계: iframe 내부 document 준비 + wait.until(lambda d: d.execute_script(""" + try { + var editor = tinymce.get('editor-tistory'); + var doc = editor.getDoc(); + return doc && doc.readyState === 'complete'; + } catch (e) { + return false; + } + """)) + + print("✅ 티스토리 에디터 완전 로딩 완료!") + return True + From da3647db502c8ce081f830e18b613d4f6f235cc6 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=EA=B9=80=EA=B2=BD=EB=AF=BC?= <153978154+kakusiA@users.noreply.github.com> Date: Wed, 17 Sep 2025 10:29:57 +0900 Subject: [PATCH 36/57] =?UTF-8?q?refactor=20:=20FAST=20API=20Response=20?= =?UTF-8?q?=EB=A6=AC=ED=8E=99=ED=86=A0=EB=A7=81=20(#105)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactoring: 사용하지 않는 url 삭제 * refactoring: - keyword Response 리펙토링 - match Response 리펙토링 - search Response 리펙토링 - 공통 Response 리펙토링및 수정중 * refactor: product response 공통화 작업 * refactor: - service단 공통 response 적용 완료 - schemas.py 사용하지않은 파라미터 삭제 - api response result -> response_data로 명명규칙 공통화 완료 * feat: 공통 응답을위한 success,error,not_found util 개발 * chore: poetry의존성 추가 * test: response변경으로인한 테스트코드 수정 완료 * style: 코드 포맷팅 * chore: poetry.lock파일 update --------- Co-authored-by: kakusia --- .../app/api/endpoints/blog.py | 20 +- .../app/api/endpoints/keywords.py | 17 - .../app/api/endpoints/product.py | 24 +- .../app/model/schemas.py | 151 +++++-- .../app/service/crawl_service.py | 6 +- .../app/service/keyword_service.py | 41 +- .../app/service/match_service.py | 26 +- .../app/service/search_service.py | 26 +- .../app/service/similarity_service.py | 26 +- .../app/test/test_keyword.py | 14 +- .../app/test/test_match_service.py | 20 +- .../app/test/test_sadagu_crawl.py | 6 +- .../app/test/test_search_service.py | 16 +- .../app/test/test_similarity_service.py | 34 +- .../app/utils/response.py | 25 ++ apps/pre-processing-service/poetry.lock | 422 +++++++++++++++++- apps/pre-processing-service/pyproject.toml | 2 +- 17 files changed, 712 insertions(+), 164 deletions(-) create mode 100644 apps/pre-processing-service/app/utils/response.py diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 85da62b2..68a23496 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -7,6 +7,7 @@ from 
...service.blog.blogger_blog_post_adapter import ( BloggerBlogPostAdapter, ) # 수정된 import +from app.utils.response import Response router = APIRouter() @@ -36,44 +37,45 @@ async def publish(request: RequestBlogPublish): """ if request.tag == "naver": naver_service = NaverBlogPostService() - result = naver_service.post_content( + response_data = naver_service.post_content( title=request.post_title, content=request.post_content, tags=request.post_tags, ) - if not result: + if not response_data: raise CustomException( "네이버 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish(status="success", metadata=result) + + return Response.ok(response_data) elif request.tag == "tistory": tistory_service = TistoryBlogPostService() - result = tistory_service.post_content( + response_data = tistory_service.post_content( title=request.post_title, content=request.post_content, tags=request.post_tags, ) - if not result: + if not response_data: raise CustomException( "티스토리 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish(status="success", metadata=result) + return Response.ok(response_data) elif request.tag == "blogger": blogger_service = BloggerBlogPostAdapter() # 수정: Adapter 사용 - result = blogger_service.post_content( + response_data = blogger_service.post_content( title=request.post_title, content=request.post_content, tags=request.post_tags, ) - if not result: + if not response_data: raise CustomException( "블로거 블로그 포스팅에 실패했습니다.", status_code=500 ) - return ResponseBlogPublish(status="success", metadata=result) + return Response.ok(response_data) diff --git a/apps/pre-processing-service/app/api/endpoints/keywords.py b/apps/pre-processing-service/app/api/endpoints/keywords.py index a1028391..6c1627bd 100644 --- a/apps/pre-processing-service/app/api/endpoints/keywords.py +++ b/apps/pre-processing-service/app/api/endpoints/keywords.py @@ -6,11 +6,6 @@ router = APIRouter() -@router.get("/") -async def root(): - return {"message": "keyword API"} - - 
@router.post( "/search", response_model=ResponseNaverSearch, summary="네이버 키워드 검색" ) @@ -28,15 +23,3 @@ async def search(request: RequestNaverSearch): """ response_data = await keyword_search(request) return response_data - - -@router.post( - "/ssadagu/validate", - response_model=ResponseNaverSearch, - summary="사다구몰 키워드 검증", -) -async def ssadagu_validate(request: RequestNaverSearch): - """ - 사다구몰 키워드 검증 테스트용 엔드포인트 - """ - return ResponseNaverSearch() diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index ab309595..32a4dcbe 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -24,12 +24,12 @@ async def search(request: RequestSadaguSearch): """ try: search_service = SearchService() - result = await search_service.search_products(request) + response_data = await search_service.search_products(request) - if not result: + if not response_data: raise CustomException(500, "상품 검색에 실패했습니다.", "SEARCH_FAILED") - return result + return response_data except InvalidItemDataException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except Exception as e: @@ -43,12 +43,12 @@ async def match(request: RequestSadaguMatch): """ try: match_service = MatchService() - result = match_service.match_products(request) + response_data = match_service.match_products(request) - if not result: + if not response_data: raise CustomException(500, "상품 매칭에 실패했습니다.", "MATCH_FAILED") - return result + return response_data except InvalidItemDataException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except Exception as e: @@ -64,14 +64,14 @@ async def similarity(request: RequestSadaguSimilarity): """ try: similarity_service = SimilarityService() - result = similarity_service.select_product_by_similarity(request) + response_data = similarity_service.select_product_by_similarity(request) - if not 
result: + if not response_data: raise CustomException( 500, "유사도 분석에 실패했습니다.", "SIMILARITY_FAILED" ) - return result + return response_data except InvalidItemDataException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except Exception as e: @@ -87,12 +87,12 @@ async def crawl(body: RequestSadaguCrawl): """ try: crawl_service = CrawlService() - result = await crawl_service.crawl_product_detail(body) + response_data = await crawl_service.crawl_product_detail(body) - if not result: + if not response_data: raise CustomException(500, "상품 크롤링에 실패했습니다.", "CRAWL_FAILED") - return result + return response_data except InvalidItemDataException as e: raise HTTPException(status_code=e.status_code, detail=e.detail) except ItemNotFoundException as e: diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index 9581ad0f..18d0d99f 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -1,7 +1,10 @@ from datetime import datetime -from typing import Optional, List, Dict, Any +from typing import Optional, List, Dict, Any, TypeVar, Generic from pydantic import BaseModel, Field, HttpUrl +# 제네릭 타입 변수 정의 +T = TypeVar("T") + # 기본 요청 class RequestBase(BaseModel): @@ -9,46 +12,60 @@ class RequestBase(BaseModel): # 기본 응답 -class ResponseBase(BaseModel): +class ResponseBase(BaseModel, Generic[T]): + success: bool = Field(..., title="성공유무", description="true,false") + data: T = Field(..., title="응답 데이터") status: str = Field(..., title="상태", description="요청 처리 상태") - pass + message: str = Field(..., title="메시지", description="메시지입니다.") + + +# ============== 1단계: 네이버 키워드 추출 ============== -# 네이버 키워드 추출 class RequestNaverSearch(RequestBase): tag: str = Field(..., title="태그", description="데이터랩/스토어 태그 구분") - category: Optional[str] = Field( - None, title="카테고리", description="검색할 카테고리" - ) - start_date: Optional[str] = Field( - None, title="시작일", 
description="검색 시작 날짜 (YYYY-MM-DD)" - ) - end_date: Optional[str] = Field( - None, title="종료일", description="검색 종료 날짜 (YYYY-MM-DD)" - ) -class ResponseNaverSearch(ResponseBase): - category: Optional[str] = Field(None, title="카테고리", description="검색 카테고리") +# 응답 데이터 모델 +class NaverSearchData(BaseModel): keyword: str = Field(..., title="키워드", description="검색에 사용된 키워드") total_keyword: Dict[int, str] = Field( ..., title="총 키워드", description="키워드별 총 검색 결과" ) -# 2단계: 검색 +# 최종 응답 모델 +class ResponseNaverSearch(ResponseBase[NaverSearchData]): + """네이버 키워드 검색 API 응답""" + + pass + + +# ============== 2단계: 사다구 검색 ============== + + class RequestSadaguSearch(RequestBase): keyword: str = Field(..., title="검색 키워드", description="상품을 검색할 키워드") -class ResponseSadaguSearch(ResponseBase): +# 응답 데이터 모델 +class SadaguSearchData(BaseModel): keyword: str = Field(..., title="검색 키워드", description="검색에 사용된 키워드") search_results: List[Dict] = Field( ..., title="검색 결과", description="검색된 상품 목록" ) -# 3단계: 매칭 +# 최종 응답 모델 +class ResponseSadaguSearch(ResponseBase[SadaguSearchData]): + """사다구 상품 검색 API 응답""" + + pass + + +# ============== 3단계: 사다구 매칭 ============== + + class RequestSadaguMatch(RequestBase): keyword: str = Field(..., title="매칭 키워드", description="상품과 매칭할 키워드") search_results: List[Dict] = Field( @@ -56,14 +73,24 @@ class RequestSadaguMatch(RequestBase): ) -class ResponseSadaguMatch(ResponseBase): +# 응답 데이터 모델 +class SadaguMatchData(BaseModel): keyword: str = Field(..., title="매칭 키워드", description="매칭에 사용된 키워드") matched_products: List[Dict] = Field( ..., title="매칭된 상품", description="키워드와 매칭된 상품 목록" ) -# 4단계: 유사도 +# 최종 응답 모델 +class ResponseSadaguMatch(ResponseBase[SadaguMatchData]): + """사다구 상품 매칭 API 응답""" + + pass + + +# ============== 4단계: 사다구 유사도 ============== + + class RequestSadaguSimilarity(RequestBase): keyword: str = Field( ..., title="유사도 분석 키워드", description="유사도 분석할 키워드" @@ -78,7 +105,8 @@ class RequestSadaguSimilarity(RequestBase): ) -class 
ResponseSadaguSimilarity(ResponseBase): +# 응답 데이터 모델 +class SadaguSimilarityData(BaseModel): keyword: str = Field( ..., title="분석 키워드", description="유사도 분석에 사용된 키워드" ) @@ -90,7 +118,16 @@ class ResponseSadaguSimilarity(ResponseBase): ) -# 사다구몰 크롤링 +# 최종 응답 모델 +class ResponseSadaguSimilarity(ResponseBase[SadaguSimilarityData]): + """사다구 상품 유사도 분석 API 응답""" + + pass + + +# ============== 사다구몰 크롤링 ============== + + class RequestSadaguCrawl(RequestBase): tag: str = Field( ..., @@ -102,7 +139,8 @@ class RequestSadaguCrawl(RequestBase): ) -class ResponseSadaguCrawl(ResponseBase): +# 응답 데이터 모델 +class SadaguCrawlData(BaseModel): tag: str = Field(..., title="크롤링 태그", description="크롤링 유형 태그") product_url: str = Field(..., title="상품 URL", description="크롤링된 상품 URL") product_detail: Optional[Dict] = Field( @@ -113,30 +151,79 @@ class ResponseSadaguCrawl(ResponseBase): ) -# 블로그 콘텐츠 생성 -class RequestBlogCreate(RequestBase): +# 최종 응답 모델 +class ResponseSadaguCrawl(ResponseBase[SadaguCrawlData]): + """사다구몰 크롤링 API 응답""" + pass -class ResponseBlogCreate(ResponseBase): +# ============== 블로그 콘텐츠 생성 ============== + + +class RequestBlogCreate(RequestBase): + keyword: Optional[str] = Field( + None, title="키워드", description="콘텐츠 생성용 키워드" + ) + product_info: Optional[Dict] = Field( + None, title="상품 정보", description="블로그 콘텐츠에 포함할 상품 정보" + ) + content_type: Optional[str] = Field( + None, title="콘텐츠 타입", description="생성할 콘텐츠 유형" + ) + target_length: Optional[int] = Field( + None, title="목표 글자 수", description="생성할 콘텐츠의 목표 길이" + ) + + +# 응답 데이터 모델 +class BlogCreateData(BaseModel): + title: str = Field(..., title="블로그 제목", description="생성된 블로그 제목") + content: str = Field(..., title="블로그 내용", description="생성된 블로그 내용") + tags: List[str] = Field( + default_factory=list, title="추천 태그", description="콘텐츠에 적합한 태그 목록" + ) + + +# 최종 응답 모델 +class ResponseBlogCreate(ResponseBase[BlogCreateData]): + """블로그 콘텐츠 생성 API 응답""" + pass -# 블로그 배포 +# ============== 블로그 배포 ============== + + class 
RequestBlogPublish(RequestBase): tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류") blog_id: str = Field(..., description="블로그 아이디") blog_pw: str = Field(..., description="블로그 비밀번호") post_title: str = Field(..., description="포스팅 제목") post_content: str = Field(..., description="포스팅 내용") - post_tags: List[str] = Field(default=[], description="포스팅 태그 목록") + post_tags: List[str] = Field(default_factory=list, description="포스팅 태그 목록") -class ResponseBlogPublish(ResponseBase): - # 디버깅 용 +# 응답 데이터 모델 +class BlogPublishData(BaseModel): + tag: str = Field(..., title="블로그 태그", description="블로그 플랫폼 종류") + post_title: str = Field(..., title="포스팅 제목", description="배포된 포스팅 제목") + post_url: Optional[str] = Field( + None, title="포스팅 URL", description="배포된 포스팅 URL" + ) + published_at: Optional[str] = Field( + None, title="배포 시간", description="포스팅 배포 완료 시간" + ) + publish_success: bool = Field(..., title="배포 성공 여부") + + # 디버깅 용 (Optional로 변경) metadata: Optional[Dict[str, Any]] = Field( None, description="포스팅 관련 메타데이터" ) - # 프로덕션 용 - # post_url: str = Field(..., description="포스팅 URL") + +# 최종 응답 모델 +class ResponseBlogPublish(ResponseBase[BlogPublishData]): + """블로그 배포 API 응답""" + + pass diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 548df05d..f54d4db7 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -3,6 +3,7 @@ from app.errors.CustomException import InvalidItemDataException from app.model.schemas import RequestSadaguCrawl from loguru import logger +from app.utils.response import Response class CrawlService: @@ -36,16 +37,15 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: ) # 응답 데이터 구성 - response_data = { + data = { "tag": request.tag, "product_url": str(request.product_url), "product_detail": product_detail, - "status": "success", "crawled_at": time.strftime("%Y-%m-%d 
%H:%M:%S"), } logger.info(f"상품 상세 크롤링 서비스 완료: status=success") - return response_data + return Response.ok(data) except Exception as e: logger.error( diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py index f8065fa3..01d61f37 100644 --- a/apps/pre-processing-service/app/service/keyword_service.py +++ b/apps/pre-processing-service/app/service/keyword_service.py @@ -1,11 +1,11 @@ import json import random - +from app.utils.response import Response import httpx -from starlette import status from ..errors.CustomException import InvalidItemDataException from ..model.schemas import RequestNaverSearch +from datetime import date, timedelta async def keyword_search(request: RequestNaverSearch) -> dict: @@ -16,9 +16,7 @@ async def keyword_search(request: RequestNaverSearch) -> dict: # 키워드 검색 if request.tag == "naver": - trending_keywords = await search_naver_rank( - **request.model_dump(include={"category", "start_date", "end_date"}) - ) + trending_keywords = await search_naver_rank() elif request.tag == "naver_store": trending_keywords = await search_naver_store() else: @@ -27,14 +25,14 @@ async def keyword_search(request: RequestNaverSearch) -> dict: if not trending_keywords: raise InvalidItemDataException() - response_data = request.model_dump() - response_data["keyword"] = random.choice(list(trending_keywords.values())) - response_data["total_keyword"] = trending_keywords - response_data["status"] = "success" - return response_data + data = { + "keyword": random.choice(list(trending_keywords.values())), + "total_keyword": trending_keywords, + } + return Response.ok(data) -async def search_naver_rank(category, start_date, end_date) -> dict[int, str]: +async def search_naver_rank() -> dict[int, str]: """ 네이버 데이터 랩 키워드 검색 모듈 """ @@ -44,6 +42,26 @@ async def search_naver_rank(category, start_date, end_date) -> dict[int, str]: "Referer": 
"https://datalab.naver.com/shoppingInsight/sCategory.naver", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36", } + categorys = [ + "50000000", + "50000001", + "50000002", + "50000003", + "50000004", + "50000005", + "50000006", + "50000007", + "50000008", + "50000009", + ] + category = random.choice(categorys) + today = date.today() + yesterday = today - timedelta(days=1) + + # 3. 원하는 포맷(YYYY-MM-DD)으로 변환하기 + end_date = today.strftime("%Y-%m-%d") + start_date = yesterday.strftime("%Y-%m-%d") + keywords_dic = {} async with httpx.AsyncClient() as client: for page in range(1, 3): @@ -80,7 +98,6 @@ async def search_naver_store() -> dict[int, str]: """ url = "https://snxbest.naver.com/api/v1/snxbest/keyword/rank?ageType=ALL&categoryId=A&sortType=KEYWORD_POPULAR&periodType=DAILY" headers = {} - async with httpx.AsyncClient() as client: try: # API에 GET 요청을 보냅니다. diff --git a/apps/pre-processing-service/app/service/match_service.py b/apps/pre-processing-service/app/service/match_service.py index 9f340683..bb6e88e2 100644 --- a/apps/pre-processing-service/app/service/match_service.py +++ b/apps/pre-processing-service/app/service/match_service.py @@ -2,6 +2,7 @@ from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguMatch from loguru import logger +from app.utils.response import Response class MatchService: @@ -15,21 +16,16 @@ def match_products(self, request: RequestSadaguMatch) -> dict: keyword = request.keyword products = request.search_results - logger.info( - # f"키워드 매칭 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}', products_count={len(products) if products else 0}" - f"keyword='{keyword}'" - ) + logger.info(f"키워드 매칭 서비스 시작: keyword='{keyword}'") if not products: logger.warning(f"매칭할 상품이 없음: keyword='{keyword}'") - return { - # "job_id": request.job_id, - # "schedule_id": request.schedule_id, - # 
"schedule_his_id": request.schedule_his_id, + + data = { "keyword": keyword, "matched_products": [], - "status": "success", } + return Response.ok(data, "매칭상품이 존재하지 않습니다.") try: matcher = KeywordMatcher() @@ -79,18 +75,12 @@ def match_products(self, request: RequestSadaguMatch) -> dict: logger.info( f"최고 매칭 상품: title='{best_match['title'][:30]}', score={best_match['match_info']['match_score']:.3f}" ) - - return { - # "job_id": request.job_id, - # "schedule_id": request.schedule_id, - # "schedule_his_id": request.schedule_his_id, + data = { "keyword": keyword, "matched_products": matched_products, - "status": "success", } + return Response.ok(data) except Exception as e: - logger.error( - # f"매칭 서비스 오류: job_id={request.job_id}, keyword='{keyword}', error='{e}'" - ) + logger.error(f"매칭 서비스 오류: error='{e}'") raise InvalidItemDataException() diff --git a/apps/pre-processing-service/app/service/search_service.py b/apps/pre-processing-service/app/service/search_service.py index a71d6a8d..171bd57f 100644 --- a/apps/pre-processing-service/app/service/search_service.py +++ b/apps/pre-processing-service/app/service/search_service.py @@ -2,6 +2,9 @@ from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSearch from loguru import logger +from app.utils.response import Response +from datetime import datetime +import time class SearchService: @@ -15,11 +18,11 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: keyword = request.keyword crawler = SearchCrawler(use_selenium=True) + # 시작 시간 기록 + start_time = time.time() + try: - logger.info( - # f"상품 검색 서비스 시작: job_id={request.job_id}, schedule_id={request.schedule_id}, keyword='{keyword}'" - f"keyword='{keyword}'" - ) + logger.info(f"keyword='{keyword}'") # Selenium 또는 httpx로 상품 검색 if crawler.use_selenium: @@ -29,11 +32,13 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: if not search_results: logger.warning(f"검색 결과가 없습니다: 
keyword='{keyword}'") - return { + + # SadaguSearchData 구조에 맞춰 response_data 생성 + data = { "keyword": keyword, "search_results": [], - "status": "success", } + return Response.ok(data, "검색 결과가 없습니다.") # 상품별 기본 정보 수집 (제목이 없는 경우 다시 크롤링) enriched_results = [] @@ -83,15 +88,20 @@ async def search_products(self, request: RequestSadaguSearch) -> dict: ) continue + # 검색 소요 시간 계산 + search_time_ms = int((time.time() - start_time) * 1000) + logger.info(f"검색 소요 시간 = {search_time_ms}") + logger.success( f"상품 검색 완료: keyword='{keyword}', 초기검색={len(search_results)}개, 최종유효상품={len(enriched_results)}개" ) - return { + # SadaguSearchData 구조에 맞춰 response_data 생성 + data = { "keyword": keyword, "search_results": enriched_results, - "status": "success", } + return Response.ok(data) except Exception as e: logger.error(f"검색 서비스 오류: keyword='{keyword}', error='{e}'") diff --git a/apps/pre-processing-service/app/service/similarity_service.py b/apps/pre-processing-service/app/service/similarity_service.py index c77aa8ba..516b0c63 100644 --- a/apps/pre-processing-service/app/service/similarity_service.py +++ b/apps/pre-processing-service/app/service/similarity_service.py @@ -2,6 +2,7 @@ from app.errors.CustomException import InvalidItemDataException from ..model.schemas import RequestSadaguSimilarity from loguru import logger +from app.utils.response import Response class SimilarityService: @@ -26,12 +27,13 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.warning( f"매칭된 상품과 검색 결과가 모두 없음: keyword='{keyword}'" ) - return { + + data = { "keyword": keyword, "selected_product": None, "reason": "매칭된 상품과 검색 결과가 모두 없음", - "status": "success", } + return Response.ok(data, "매칭된 상품과 검색 결과가 모두 없습니다.") logger.info("매칭된 상품 없음 → 전체 검색 결과에서 유사도 분석") candidates = fallback_products @@ -63,12 +65,14 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.warning( f"단일 상품 유사도 미달: similarity={similarity:.4f} < 
threshold={similarity_threshold}" ) - return { + data = { "keyword": keyword, "selected_product": None, "reason": f"단일 상품 유사도({similarity:.4f}) < 기준({similarity_threshold})", - "status": "success", } + return Response.ok( + data, "단일 상품 유사도 미달 되어 상품이 존재하지않습니다." + ) selected_product["similarity_info"] = { "similarity_score": float(similarity), @@ -79,13 +83,12 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.success( f"단일 상품 선택 완료: title='{selected_product['title'][:30]}', similarity={similarity:.4f}" ) - - return { + data = { "keyword": keyword, "selected_product": selected_product, "reason": f"단일 상품 - 유사도: {similarity:.4f} ({analysis_mode})", - "status": "success", } + return Response.ok(data) # 여러 개가 있으면 유사도 비교 logger.info("여러 상품 중 최고 유사도로 선택...") @@ -114,12 +117,12 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.warning( f"최고 유사도 미달: similarity={best_result['similarity']:.4f} < threshold={similarity_threshold}" ) - return { + data = { "keyword": keyword, "selected_product": None, "reason": f"최고 유사도({best_result['similarity']:.4f}) < 기준({similarity_threshold})", - "status": "success", } + return Response.ok(data, "최고 유사도가 기준보다 미달 되었습니다.") # 유사도 정보 추가 selected_product["similarity_info"] = { @@ -147,13 +150,12 @@ def select_product_by_similarity(self, request: RequestSadaguSimilarity) -> dict logger.success( f"상품 선택 완료: title='{selected_product['title'][:30]}', {reason}" ) - - return { + data = { "keyword": keyword, "selected_product": selected_product, "reason": reason, - "status": "success", } + return Response.ok(data) except Exception as e: logger.error(f"유사도 분석 서비스 오류: keyword='{keyword}', error='{e}'") diff --git a/apps/pre-processing-service/app/test/test_keyword.py b/apps/pre-processing-service/app/test/test_keyword.py index 11bd69fa..82daefc8 100644 --- a/apps/pre-processing-service/app/test/test_keyword.py +++ b/apps/pre-processing-service/app/test/test_keyword.py @@ 
-1,16 +1,11 @@ import pytest from fastapi.testclient import TestClient from app.main import app +from app.utils.response import Response client = TestClient(app) -def test_read_root(): - response = client.get("/keywords/") - assert response.status_code == 200 - assert response.json() == {"message": "keyword API"} - - @pytest.mark.parametrize( "tag, category, start_date, end_date", [ @@ -32,6 +27,7 @@ def test_search(tag, category, start_date, end_date): assert response.status_code == 200 response_data = response.json() - assert response_data["status"] == "success" - assert "keyword" in response_data - assert isinstance(response_data["total_keyword"], dict) + assert response_data["success"] == True + assert response_data["status"] == "OK" + assert "keyword" in response_data["data"] + assert isinstance(response_data["data"]["total_keyword"], dict) diff --git a/apps/pre-processing-service/app/test/test_match_service.py b/apps/pre-processing-service/app/test/test_match_service.py index 3f50ffad..7deb043c 100644 --- a/apps/pre-processing-service/app/test/test_match_service.py +++ b/apps/pre-processing-service/app/test/test_match_service.py @@ -32,13 +32,14 @@ def test_match_success(): assert response.status_code == 200 data = response.json() - assert data["keyword"] == body["keyword"] - assert data["status"] == "success" - assert isinstance(data["matched_products"], list) + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["keyword"] == body["keyword"] + assert isinstance(data["data"]["matched_products"], list) # 반지가 포함된 상품들이 매칭되어야 함 - if data["matched_products"]: - for product in data["matched_products"]: + if data["data"]["matched_products"]: + for product in data["data"]["matched_products"]: assert "match_info" in product assert "match_type" in product["match_info"] assert "match_score" in product["match_info"] @@ -56,7 +57,9 @@ def test_match_no_results(): assert response.status_code == 200 data = response.json() - assert 
data["matched_products"] == [] + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["matched_products"] == [] def test_match_no_matches(): @@ -83,5 +86,6 @@ def test_match_no_matches(): assert response.status_code == 200 data = response.json() # 매칭되지 않아도 성공으로 처리 - assert data["status"] == "success" - assert isinstance(data["matched_products"], list) + assert data["success"] == True + assert data["status"] == "OK" + assert isinstance(data["data"]["matched_products"], list) diff --git a/apps/pre-processing-service/app/test/test_sadagu_crawl.py b/apps/pre-processing-service/app/test/test_sadagu_crawl.py index b419b5c6..72e4f0df 100644 --- a/apps/pre-processing-service/app/test/test_sadagu_crawl.py +++ b/apps/pre-processing-service/app/test/test_sadagu_crawl.py @@ -18,8 +18,10 @@ def test_crawl_success(): assert response.status_code == 200 data = response.json() - assert data["product_url"] == body["product_url"] - assert "product_detail" in data + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["product_url"] == body["product_url"] + assert "product_detail" in data["data"] # def test_crawl_invalid_url(): diff --git a/apps/pre-processing-service/app/test/test_search_service.py b/apps/pre-processing-service/app/test/test_search_service.py index d5d3a618..7ee32252 100644 --- a/apps/pre-processing-service/app/test/test_search_service.py +++ b/apps/pre-processing-service/app/test/test_search_service.py @@ -14,9 +14,10 @@ def test_search_success(): assert response.status_code == 200 data = response.json() - assert data["keyword"] == body["keyword"] - assert data["status"] == "success" - assert isinstance(data["search_results"], list) + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["keyword"] == body["keyword"] + assert isinstance(data["data"]["search_results"], list) def test_search_empty_keyword(): @@ -29,7 +30,9 @@ def test_search_empty_keyword(): # 빈 키워드라도 에러가 아닌 
빈 결과를 반환해야 함 assert response.status_code == 200 data = response.json() - assert data["search_results"] == [] + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["search_results"] == [] def test_search_nonexistent_keyword(): @@ -44,5 +47,6 @@ def test_search_nonexistent_keyword(): assert response.status_code == 200 data = response.json() # 검색 결과가 없어도 성공으로 처리 - assert data["status"] == "success" - assert isinstance(data["search_results"], list) + assert data["success"] == True + assert data["status"] == "OK" + assert isinstance(data["data"]["search_results"], list) diff --git a/apps/pre-processing-service/app/test/test_similarity_service.py b/apps/pre-processing-service/app/test/test_similarity_service.py index 5eeba78d..6efbcdc1 100644 --- a/apps/pre-processing-service/app/test/test_similarity_service.py +++ b/apps/pre-processing-service/app/test/test_similarity_service.py @@ -38,13 +38,14 @@ def test_similarity_with_matched_products(): assert response.status_code == 200 data = response.json() - assert data["keyword"] == body["keyword"] - assert data["status"] == "success" + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["keyword"] == body["keyword"] - if data["selected_product"]: - assert "similarity_info" in data["selected_product"] - assert "similarity_score" in data["selected_product"]["similarity_info"] - assert data["reason"] is not None + if data["data"]["selected_product"]: + assert "similarity_info" in data["data"]["selected_product"] + assert "similarity_score" in data["data"]["selected_product"]["similarity_info"] + assert data["data"]["reason"] is not None def test_similarity_fallback_to_search_results(): @@ -71,13 +72,14 @@ def test_similarity_fallback_to_search_results(): assert response.status_code == 200 data = response.json() - assert data["status"] == "success" + assert data["success"] == True + assert data["status"] == "OK" # 폴백 모드에서는 임계값을 통과한 경우에만 상품이 선택됨 - if 
data["selected_product"]: - assert "similarity_info" in data["selected_product"] + if data["data"]["selected_product"]: + assert "similarity_info" in data["data"]["selected_product"] assert ( - data["selected_product"]["similarity_info"]["analysis_mode"] + data["data"]["selected_product"]["similarity_info"]["analysis_mode"] == "fallback_similarity_only" ) @@ -102,9 +104,11 @@ def test_similarity_single_candidate(): assert response.status_code == 200 data = response.json() - assert data["selected_product"] is not None + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["selected_product"] is not None assert ( - data["selected_product"]["similarity_info"]["analysis_type"] + data["data"]["selected_product"]["similarity_info"]["analysis_type"] == "single_candidate" ) @@ -122,5 +126,7 @@ def test_similarity_no_candidates(): assert response.status_code == 200 data = response.json() - assert data["selected_product"] is None - assert "검색 결과가 모두 없음" in data["reason"] + assert data["success"] == True + assert data["status"] == "OK" + assert data["data"]["selected_product"] is None + assert "검색 결과가 모두 없음" in data["data"]["reason"] diff --git a/apps/pre-processing-service/app/utils/response.py b/apps/pre-processing-service/app/utils/response.py new file mode 100644 index 00000000..305f080e --- /dev/null +++ b/apps/pre-processing-service/app/utils/response.py @@ -0,0 +1,25 @@ +class Response: + @staticmethod + def ok(data: dict, message: str = "OK") -> dict: + """성공 응답""" + return {"success": True, "data": data, "status": "OK", "message": message} + + @staticmethod + def error(message: str = "오류가 발생했습니다", data: dict = None) -> dict: + """에러 응답""" + return { + "success": False, + "data": data or {}, + "status": "ERROR", + "message": message, + } + + @staticmethod + def not_found(message: str = "결과를 찾을 수 없습니다", data: dict = None) -> dict: + """검색 결과 없음""" + return { + "success": True, # 에러가 아닌 정상 처리 + "data": data or {}, + "status": "NOT_FOUND", 
+ "message": message, + } diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index d47e6783..bb05b60a 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -507,6 +507,18 @@ all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (> standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +[[package]] +name = "filelock" +version = "3.19.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d"}, + {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"}, +] + [[package]] name = "flatbuffers" version = "25.2.10" @@ -519,6 +531,46 @@ files = [ {file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"}, ] +[[package]] +name = "fsspec" +version = "2025.9.0" +description = "File-system specification" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fsspec-2025.9.0-py3-none-any.whl", hash = "sha256:530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7"}, + {file = "fsspec-2025.9.0.tar.gz", hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dev = ["pre-commit", "ruff (>=0.5)"] +doc = ["numpydoc", "sphinx", 
"sphinx-design", "sphinx-rtd-theme", "yarl"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""] +tqdm = ["tqdm"] + [[package]] name = "google" version = "3.0.0" @@ -764,6 +816,28 @@ files = [ {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] +[[package]] +name = "hf-xet" +version = "1.1.10" +description = "Fast transfer of large files with the Hugging Face Hub." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\"" +files = [ + {file = "hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d"}, + {file = "hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b"}, + {file = "hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435"}, + {file = "hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c"}, + {file = "hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06"}, + {file = "hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f"}, + {file = "hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045"}, + {file = "hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "httpcore" version = "1.0.9" @@ -826,6 +900,45 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "huggingface-hub" +version = "0.35.0" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "huggingface_hub-0.35.0-py3-none-any.whl", hash = "sha256:f2e2f693bca9a26530b1c0b9bcd4c1495644dad698e6a0060f90e22e772c31e9"}, + {file = "huggingface_hub-0.35.0.tar.gz", hash = 
"sha256:ccadd2a78eef75effff184ad89401413629fabc52cefd76f6bbacb9b1c0676ac"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +hf-xet = {version = ">=1.1.3,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""} +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +hf-xet = ["hf-xet (>=1.1.2,<2.0.0)"] +inference = ["aiohttp"] +mcp = ["aiohttp", "mcp (>=1.8.0)", "typer"] +oauth = ["authlib (>=1.3.2)", "fastapi", "httpx", "itsdangerous"] 
+quality = ["libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "ruff (>=0.9.0)", "ty"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors[torch]", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + [[package]] name = "humanfriendly" version = "10.0" @@ -1755,6 +1868,166 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "regex" +version = "2025.9.1" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "regex-2025.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5aa2a6a73bf218515484b36a0d20c6ad9dc63f6339ff6224147b0e2c095ee55"}, + {file = "regex-2025.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c2ff5c01d5e47ad5fc9d31bcd61e78c2fa0068ed00cab86b7320214446da766"}, + {file = "regex-2025.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d49dc84e796b666181de8a9973284cad6616335f01b52bf099643253094920fc"}, + {file = "regex-2025.9.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9914fe1040874f83c15fcea86d94ea54091b0666eab330aaab69e30d106aabe"}, + {file = "regex-2025.9.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e71bceb3947362ec5eabd2ca0870bb78eae4edfc60c6c21495133c01b6cd2df4"}, + {file = "regex-2025.9.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:67a74456f410fe5e869239ee7a5423510fe5121549af133809d9591a8075893f"}, + {file = "regex-2025.9.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c3b96ed0223b32dbdc53a83149b6de7ca3acd5acd9c8e64b42a166228abe29c"}, + {file = "regex-2025.9.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:113d5aa950f428faf46fd77d452df62ebb4cc6531cb619f6cc30a369d326bfbd"}, + {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcdeb38de4f7f3d69d798f4f371189061446792a84e7c92b50054c87aae9c07c"}, + {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4bcdff370509164b67a6c8ec23c9fb40797b72a014766fdc159bb809bd74f7d8"}, + {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:7383efdf6e8e8c61d85e00cfb2e2e18da1a621b8bfb4b0f1c2747db57b942b8f"}, + {file = "regex-2025.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:1ec2bd3bdf0f73f7e9f48dca550ba7d973692d5e5e9a90ac42cc5f16c4432d8b"}, + {file = "regex-2025.9.1-cp310-cp310-win32.whl", hash = "sha256:9627e887116c4e9c0986d5c3b4f52bcfe3df09850b704f62ec3cbf177a0ae374"}, + {file = "regex-2025.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:94533e32dc0065eca43912ee6649c90ea0681d59f56d43c45b5bcda9a740b3dd"}, + {file = "regex-2025.9.1-cp310-cp310-win_arm64.whl", hash = "sha256:a874a61bb580d48642ffd338570ee24ab13fa023779190513fcacad104a6e251"}, + {file = "regex-2025.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e5bcf112b09bfd3646e4db6bf2e598534a17d502b0c01ea6550ba4eca780c5e6"}, + {file = "regex-2025.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:67a0295a3c31d675a9ee0238d20238ff10a9a2fdb7a1323c798fc7029578b15c"}, + {file = "regex-2025.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea8267fbadc7d4bd7c1301a50e85c2ff0de293ff9452a1a9f8d82c6cafe38179"}, + {file = "regex-2025.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6aeff21de7214d15e928fb5ce757f9495214367ba62875100d4c18d293750cc1"}, + {file = "regex-2025.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d89f1bbbbbc0885e1c230f7770d5e98f4f00b0ee85688c871d10df8b184a6323"}, + {file = "regex-2025.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca3affe8ddea498ba9d294ab05f5f2d3b5ad5d515bc0d4a9016dd592a03afe52"}, + {file = "regex-2025.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91892a7a9f0a980e4c2c85dd19bc14de2b219a3a8867c4b5664b9f972dcc0c78"}, + {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e1cb40406f4ae862710615f9f636c1e030fd6e6abe0e0f65f6a695a2721440c6"}, + {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94f6cff6f7e2149c7e6499a6ecd4695379eeda8ccbccb9726e8149f2fe382e92"}, + 
{file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6c0226fb322b82709e78c49cc33484206647f8a39954d7e9de1567f5399becd0"}, + {file = "regex-2025.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a12f59c7c380b4fcf7516e9cbb126f95b7a9518902bcf4a852423ff1dcd03e6a"}, + {file = "regex-2025.9.1-cp311-cp311-win32.whl", hash = "sha256:49865e78d147a7a4f143064488da5d549be6bfc3f2579e5044cac61f5c92edd4"}, + {file = "regex-2025.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d34b901f6f2f02ef60f4ad3855d3a02378c65b094efc4b80388a3aeb700a5de7"}, + {file = "regex-2025.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:47d7c2dab7e0b95b95fd580087b6ae196039d62306a592fa4e162e49004b6299"}, + {file = "regex-2025.9.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:84a25164bd8dcfa9f11c53f561ae9766e506e580b70279d05a7946510bdd6f6a"}, + {file = "regex-2025.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:645e88a73861c64c1af558dd12294fb4e67b5c1eae0096a60d7d8a2143a611c7"}, + {file = "regex-2025.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10a450cba5cd5409526ee1d4449f42aad38dd83ac6948cbd6d7f71ca7018f7db"}, + {file = "regex-2025.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9dc5991592933a4192c166eeb67b29d9234f9c86344481173d1bc52f73a7104"}, + {file = "regex-2025.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a32291add816961aab472f4fad344c92871a2ee33c6c219b6598e98c1f0108f2"}, + {file = "regex-2025.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:588c161a68a383478e27442a678e3b197b13c5ba51dbba40c1ccb8c4c7bee9e9"}, + {file = "regex-2025.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47829ffaf652f30d579534da9085fe30c171fa2a6744a93d52ef7195dc38218b"}, + {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:1e978e5a35b293ea43f140c92a3269b6ab13fe0a2bf8a881f7ac740f5a6ade85"}, + {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf09903e72411f4bf3ac1eddd624ecfd423f14b2e4bf1c8b547b72f248b7bf7"}, + {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d016b0f77be63e49613c9e26aaf4a242f196cd3d7a4f15898f5f0ab55c9b24d2"}, + {file = "regex-2025.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:656563e620de6908cd1c9d4f7b9e0777e3341ca7db9d4383bcaa44709c90281e"}, + {file = "regex-2025.9.1-cp312-cp312-win32.whl", hash = "sha256:df33f4ef07b68f7ab637b1dbd70accbf42ef0021c201660656601e8a9835de45"}, + {file = "regex-2025.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:5aba22dfbc60cda7c0853516104724dc904caa2db55f2c3e6e984eb858d3edf3"}, + {file = "regex-2025.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:ec1efb4c25e1849c2685fa95da44bfde1b28c62d356f9c8d861d4dad89ed56e9"}, + {file = "regex-2025.9.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bc6834727d1b98d710a63e6c823edf6ffbf5792eba35d3fa119531349d4142ef"}, + {file = "regex-2025.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c3dc05b6d579875719bccc5f3037b4dc80433d64e94681a0061845bd8863c025"}, + {file = "regex-2025.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22213527df4c985ec4a729b055a8306272d41d2f45908d7bacb79be0fa7a75ad"}, + {file = "regex-2025.9.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e3f6e3c5a5a1adc3f7ea1b5aec89abfc2f4fbfba55dafb4343cd1d084f715b2"}, + {file = "regex-2025.9.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bcb89c02a0d6c2bec9b0bb2d8c78782699afe8434493bfa6b4021cc51503f249"}, + {file = "regex-2025.9.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b0e2f95413eb0c651cd1516a670036315b91b71767af83bc8525350d4375ccba"}, + {file = 
"regex-2025.9.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a41dc039e1c97d3c2ed3e26523f748e58c4de3ea7a31f95e1cf9ff973fff5a"}, + {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f0b4258b161094f66857a26ee938d3fe7b8a5063861e44571215c44fbf0e5df"}, + {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bf70e18ac390e6977ea7e56f921768002cb0fa359c4199606c7219854ae332e0"}, + {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b84036511e1d2bb0a4ff1aec26951caa2dea8772b223c9e8a19ed8885b32dbac"}, + {file = "regex-2025.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c2e05dcdfe224047f2a59e70408274c325d019aad96227ab959403ba7d58d2d7"}, + {file = "regex-2025.9.1-cp313-cp313-win32.whl", hash = "sha256:3b9a62107a7441b81ca98261808fed30ae36ba06c8b7ee435308806bd53c1ed8"}, + {file = "regex-2025.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:b38afecc10c177eb34cfae68d669d5161880849ba70c05cbfbe409f08cc939d7"}, + {file = "regex-2025.9.1-cp313-cp313-win_arm64.whl", hash = "sha256:ec329890ad5e7ed9fc292858554d28d58d56bf62cf964faf0aa57964b21155a0"}, + {file = "regex-2025.9.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:72fb7a016467d364546f22b5ae86c45680a4e0de6b2a6f67441d22172ff641f1"}, + {file = "regex-2025.9.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c9527fa74eba53f98ad86be2ba003b3ebe97e94b6eb2b916b31b5f055622ef03"}, + {file = "regex-2025.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c905d925d194c83a63f92422af7544ec188301451b292c8b487f0543726107ca"}, + {file = "regex-2025.9.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74df7c74a63adcad314426b1f4ea6054a5ab25d05b0244f0c07ff9ce640fa597"}, + {file = "regex-2025.9.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:4f6e935e98ea48c7a2e8be44494de337b57a204470e7f9c9c42f912c414cd6f5"}, + {file = "regex-2025.9.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4a62d033cd9ebefc7c5e466731a508dfabee827d80b13f455de68a50d3c2543d"}, + {file = "regex-2025.9.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef971ebf2b93bdc88d8337238be4dfb851cc97ed6808eb04870ef67589415171"}, + {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d936a1db208bdca0eca1f2bb2c1ba1d8370b226785c1e6db76e32a228ffd0ad5"}, + {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:7e786d9e4469698fc63815b8de08a89165a0aa851720eb99f5e0ea9d51dd2b6a"}, + {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:6b81d7dbc5466ad2c57ce3a0ddb717858fe1a29535c8866f8514d785fdb9fc5b"}, + {file = "regex-2025.9.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cd4890e184a6feb0ef195338a6ce68906a8903a0f2eb7e0ab727dbc0a3156273"}, + {file = "regex-2025.9.1-cp314-cp314-win32.whl", hash = "sha256:34679a86230e46164c9e0396b56cab13c0505972343880b9e705083cc5b8ec86"}, + {file = "regex-2025.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:a1196e530a6bfa5f4bde029ac5b0295a6ecfaaffbfffede4bbaf4061d9455b70"}, + {file = "regex-2025.9.1-cp314-cp314-win_arm64.whl", hash = "sha256:f46d525934871ea772930e997d577d48c6983e50f206ff7b66d4ac5f8941e993"}, + {file = "regex-2025.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a13d20007dce3c4b00af5d84f6c191ed1c0f70928c6d9b6cd7b8d2f125df7f46"}, + {file = "regex-2025.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d6b046b0a01cb713fd53ef36cb59db4b0062b343db28e83b52ac6aa01ee5b368"}, + {file = "regex-2025.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0fa9a7477288717f42dbd02ff5d13057549e9a8cdb81f224c313154cc10bab52"}, + {file = 
"regex-2025.9.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2b3ad150c6bc01a8cd5030040675060e2adbe6cbc50aadc4da42c6d32ec266e"}, + {file = "regex-2025.9.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:aa88d5a82dfe80deaf04e8c39c8b0ad166d5d527097eb9431cb932c44bf88715"}, + {file = "regex-2025.9.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6f1dae2cf6c2dbc6fd2526653692c144721b3cf3f769d2a3c3aa44d0f38b9a58"}, + {file = "regex-2025.9.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff62a3022914fc19adaa76b65e03cf62bc67ea16326cbbeb170d280710a7d719"}, + {file = "regex-2025.9.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a34ef82216189d823bc82f614d1031cb0b919abef27cecfd7b07d1e9a8bdeeb4"}, + {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d40e6b49daae9ebbd7fa4e600697372cba85b826592408600068e83a3c47211"}, + {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0aeb0fe80331059c152a002142699a89bf3e44352aee28261315df0c9874759b"}, + {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a90014d29cb3098403d82a879105d1418edbbdf948540297435ea6e377023ea7"}, + {file = "regex-2025.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6ff623271e0b0cc5a95b802666bbd70f17ddd641582d65b10fb260cc0c003529"}, + {file = "regex-2025.9.1-cp39-cp39-win32.whl", hash = "sha256:d161bfdeabe236290adfd8c7588da7f835d67e9e7bf2945f1e9e120622839ba6"}, + {file = "regex-2025.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:43ebc77a7dfe36661192afd8d7df5e8be81ec32d2ad0c65b536f66ebfec3dece"}, + {file = "regex-2025.9.1-cp39-cp39-win_arm64.whl", hash = "sha256:5d74b557cf5554001a869cda60b9a619be307df4d10155894aeaad3ee67c9899"}, + {file = "regex-2025.9.1.tar.gz", hash = 
"sha256:88ac07b38d20b54d79e704e38aa3bd2c0f8027432164226bdee201a1c0c9c9ff"}, +] + [[package]] name = "requests" version = "2.32.5" @@ -1811,6 +2084,45 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "safetensors" +version = "0.6.2" +description = "" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "safetensors-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:9c85ede8ec58f120bad982ec47746981e210492a6db876882aa021446af8ffba"}, + {file = "safetensors-0.6.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d6675cf4b39c98dbd7d940598028f3742e0375a6b4d4277e76beb0c35f4b843b"}, + {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d2d2b3ce1e2509c68932ca03ab8f20570920cd9754b05063d4368ee52833ecd"}, + {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:93de35a18f46b0f5a6a1f9e26d91b442094f2df02e9fd7acf224cfec4238821a"}, + {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89a89b505f335640f9120fac65ddeb83e40f1fd081cb8ed88b505bdccec8d0a1"}, + {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4d0d0b937e04bdf2ae6f70cd3ad51328635fe0e6214aa1fc811f3b576b3bda"}, + {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8045db2c872db8f4cbe3faa0495932d89c38c899c603f21e9b6486951a5ecb8f"}, + {file = "safetensors-0.6.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:81e67e8bab9878bb568cffbc5f5e655adb38d2418351dc0859ccac158f753e19"}, + {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0e4d029ab0a0e0e4fdf142b194514695b1d7d3735503ba700cf36d0fc7136ce"}, + {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:fa48268185c52bfe8771e46325a1e21d317207bcabcb72e65c6e28e9ffeb29c7"}, + {file = 
"safetensors-0.6.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:d83c20c12c2d2f465997c51b7ecb00e407e5f94d7dec3ea0cc11d86f60d3fde5"}, + {file = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d944cea65fad0ead848b6ec2c37cc0b197194bec228f8020054742190e9312ac"}, + {file = "safetensors-0.6.2-cp38-abi3-win32.whl", hash = "sha256:cab75ca7c064d3911411461151cb69380c9225798a20e712b102edda2542ddb1"}, + {file = "safetensors-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:c7b214870df923cbc1593c3faee16bec59ea462758699bd3fee399d00aac072c"}, + {file = "safetensors-0.6.2.tar.gz", hash = "sha256:43ff2aa0e6fa2dc3ea5524ac7ad93a9839256b8703761e76e2d0b2a3fa4f15d9"}, +] + +[package.extras] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] +mlx = ["mlx (>=0.0.9)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.18.0)"] +quality = ["ruff"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] +testingfree = ["huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] + [[package]] name = "scikit-learn" version = "1.7.2" @@ -2147,6 +2459,39 @@ files = [ {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, ] +[[package]] +name = "tokenizers" +version = "0.22.0" +description = "" +optional = false +python-versions = ">=3.9" +groups = 
["main"] +files = [ + {file = "tokenizers-0.22.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:eaa9620122a3fb99b943f864af95ed14c8dfc0f47afa3b404ac8c16b3f2bb484"}, + {file = "tokenizers-0.22.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:71784b9ab5bf0ff3075bceeb198149d2c5e068549c0d18fe32d06ba0deb63f79"}, + {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec5b71f668a8076802b0241a42387d48289f25435b86b769ae1837cad4172a17"}, + {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ea8562fa7498850d02a16178105b58803ea825b50dc9094d60549a7ed63654bb"}, + {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4136e1558a9ef2e2f1de1555dcd573e1cbc4a320c1a06c4107a3d46dc8ac6e4b"}, + {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf5954de3962a5fd9781dc12048d24a1a6f1f5df038c6e95db328cd22964206"}, + {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8337ca75d0731fc4860e6204cc24bb36a67d9736142aa06ed320943b50b1e7ed"}, + {file = "tokenizers-0.22.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a89264e26f63c449d8cded9061adea7b5de53ba2346fc7e87311f7e4117c1cc8"}, + {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:790bad50a1b59d4c21592f9c3cf5e5cf9c3c7ce7e1a23a739f13e01fb1be377a"}, + {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:76cf6757c73a10ef10bf06fa937c0ec7393d90432f543f49adc8cab3fb6f26cb"}, + {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:1626cb186e143720c62c6c6b5371e62bbc10af60481388c0da89bc903f37ea0c"}, + {file = "tokenizers-0.22.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:da589a61cbfea18ae267723d6b029b84598dc8ca78db9951d8f5beff72d8507c"}, + {file = "tokenizers-0.22.0-cp39-abi3-win32.whl", hash = 
"sha256:dbf9d6851bddae3e046fedfb166f47743c1c7bd11c640f0691dd35ef0bcad3be"}, + {file = "tokenizers-0.22.0-cp39-abi3-win_amd64.whl", hash = "sha256:c78174859eeaee96021f248a56c801e36bfb6bd5b067f2e95aa82445ca324f00"}, + {file = "tokenizers-0.22.0.tar.gz", hash = "sha256:2e33b98525be8453f355927f3cab312c36cd3e44f4d7e9e97da2fa94d0a49dcb"}, +] + +[package.dependencies] +huggingface-hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"] + [[package]] name = "tqdm" version = "4.67.1" @@ -2169,6 +2514,81 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "transformers" +version = "4.56.1" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248"}, + {file = "transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74"}, +] + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.34.0,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +safetensors = ">=0.4.3" +tokenizers = ">=0.22.0,<=0.23.0" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.26.0)"] +all = ["Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "codecarbon (>=2.8.1)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "librosa", "mistral-common[opencv] (>=1.6.3)", "num2words", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", 
"pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +benchmark = ["optimum-benchmark (>=0.3.0)"] +chat-template = ["jinja2 (>=3.1.0)"] +codecarbon = ["codecarbon (>=2.8.1)"] +deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "optuna", "parameterized (>=0.9)", "protobuf", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "accelerate (>=0.26.0)", "av", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "jinja2 (>=3.1.0)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", 
"onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "onnxconverter-common", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow 
(>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "tf2onnx", "timeout-decorator", "tokenizers (>=0.22.0,<=0.23.0)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (>=2.8.1)", "cookiecutter (==1.7.3)", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "kenlm", "kernels (>=0.6.1,<=0.9)", "libcst", "libcst", "librosa", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "num2words", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "pandas (<2.3.0)", "parameterized (>=0.9)", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (!=1.0.18,<=1.0.19)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)", "urllib3 (<2.0.0)"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +hf-xet = ["hf_xet"] +hub-kernels = ["kernels (>=0.6.1,<=0.9)"] +integrations = ["kernels (>=0.6.1,<=0.9)", "optuna", "ray[tune] (>=2.7.0)", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp 
(>=1.1.0,<1.3.1)", "sudachidict_core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic_lite (>=1.0.7)"] +mistral-common = ["mistral-common[opencv] (>=1.6.3)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.6,<0.15.0)"] +num2words = ["num2words"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +open-telemetry = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "datasets (>=2.15.0)", "libcst", "pandas (<2.3.0)", "rich", "ruff (==0.11.2)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] +retrieval = ["datasets (>=2.15.0)", "faiss-cpu"] +ruff = ["ruff (==0.11.2)"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["accelerate (>=0.26.0)", "fastapi", "openai (>=1.98.0)", "pydantic (>=2)", "starlette", "torch (>=2.2)", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (>=2.15.0)", "datasets (>=2.15.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "libcst", "mistral-common[opencv] (>=1.6.3)", "nltk (<=3.8.1)", "parameterized (>=0.9)", "psutil", "pydantic (>=2)", "pytest (>=7.2.0)", "pytest-asyncio", "pytest-order", "pytest-rerunfailures", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.11.2)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", 
"onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +tiktoken = ["blobfile", "tiktoken"] +timm = ["timm (!=1.0.18,<=1.0.19)"] +tokenizers = ["tokenizers (>=0.22.0,<=0.23.0)"] +torch = ["accelerate (>=0.26.0)", "torch (>=2.2)"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.34.0,<1.0)", "importlib_metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.22.0,<=0.23.0)", "torch (>=2.2)", "tqdm (>=4.27)"] +video = ["av"] +vision = ["Pillow (>=10.0.1,<=15.0)"] + [[package]] name = "trio" version = "0.30.0" @@ -2336,4 +2756,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "a2a9a376c855a2599db8f932f42029ee9f016babdf0f004cb53690caa8f5a330" +content-hash = "d02c427c36a8894fe681d04a4c0c45de45d5793a4d264375992ce1104096f620" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index 42997650..c724ee24 100644 --- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -19,7 +19,7 @@ gunicorn = ">=23.0.0,<24.0.0" requests = ">=2.32.5,<3.0.0" bs4 = ">=0.0.2,<0.0.3" selenium = ">=4.35.0,<5.0.0" -#transformers = ">=4.56.0,<5.0.0" +transformers = ">=4.56.0,<5.0.0" numpy = ">=2.3.2,<3.0.0" scikit-learn = ">=1.7.1,<2.0.0" python-dotenv = ">=1.1.1,<2.0.0" From 1dd3444ab49c43898710450cda2d1d9ab871d47a Mon Sep 17 00:00:00 2001 From: JiHoon Date: Wed, 17 Sep 2025 10:45:37 +0900 Subject: [PATCH 37/57] =?UTF-8?q?chore=20:=20black=20=EC=A0=81=EC=9A=A9=20?= =?UTF-8?q?=EB=B0=8F=20=EC=A3=BC=EC=84=9D=20=EB=B3=80=EA=B2=BD?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/test/test_extraction_html.py | 2 +- .../app/utils/llm_extractor.py | 4 +- .../app/utils/preprocess_html.py | 148 ++++++++++++------ 3 files changed, 99 insertions(+), 55 deletions(-) diff --git a/apps/pre-processing-service/app/test/test_extraction_html.py b/apps/pre-processing-service/app/test/test_extraction_html.py index 0c2ce196..a023972c 100644 --- a/apps/pre-processing-service/app/test/test_extraction_html.py +++ b/apps/pre-processing-service/app/test/test_extraction_html.py @@ -484,4 +484,4 @@ # # # 대기 후 드라이버 종료 # time.sleep(5) -# driver.quit() \ No newline at end of file +# driver.quit() diff --git a/apps/pre-processing-service/app/utils/llm_extractor.py b/apps/pre-processing-service/app/utils/llm_extractor.py index 96903018..4263a270 100644 --- a/apps/pre-processing-service/app/utils/llm_extractor.py +++ b/apps/pre-processing-service/app/utils/llm_extractor.py @@ -6,7 +6,7 @@ class LLMExtractor: - def __init__(self, model = "gpt-4o"): + def __init__(self, model="gpt-4o"): """ LLMExtractor 초기화 :param model: 사용할 LLM 모델 이름 @@ -249,4 +249,4 @@ def tistory_publish_extraction_prompt(self, html: str): # 분석할 HTML: {html} - """ \ No newline at end of file + """ diff --git a/apps/pre-processing-service/app/utils/preprocess_html.py b/apps/pre-processing-service/app/utils/preprocess_html.py index 8be0ae26..6edfb9d6 100644 --- a/apps/pre-processing-service/app/utils/preprocess_html.py +++ b/apps/pre-processing-service/app/utils/preprocess_html.py @@ -3,28 +3,30 @@ from selenium.webdriver.support import expected_conditions as EC import re + def preprocess_html(html_content): """ - 보수적인 HTML 전처리 - 블로그 에디터 요소들을 더 잘 보존 + HTML 전처리 + :param html_content: 원본 HTML 문자열 + :return: 전처리된 HTML 문자열 리스트 """ - soup = BeautifulSoup(html_content, 'html.parser') + soup = BeautifulSoup(html_content, "html.parser") - # 완전히 불필요한 태그들만 제거 (더 보수적) + # 불필요한 태그 제거 unnecessary_tags = [ - 'script', # JavaScript 코드 - 'style', # CSS 스타일 - 
'noscript', # JavaScript 비활성화 시 내용 - 'meta', # 메타데이터 - 'link', # 외부 리소스 링크 (중요한 것 제외) - 'head', # head 전체 - 'title', # 페이지 제목 - 'base', # base URL + "script", # JavaScript 코드 + "style", # CSS 스타일 + "noscript", # JavaScript 비활성화 시 내용 + "meta", # 메타데이터 + "link", # 외부 리소스 링크 (중요한 것 제외) + "head", # head 전체 + "title", # 페이지 제목 + "base", # base URL ] for tag_name in unnecessary_tags: for tag in soup.find_all(tag_name): - # link 태그 중 중요한 것은 보존 - if tag_name == 'link' and tag.get('rel') in ['stylesheet', 'icon']: + if tag_name == "link" and tag.get("rel") in ["stylesheet", "icon"]: continue tag.decompose() @@ -34,35 +36,62 @@ def preprocess_html(html_content): comment.extract() # display:none만 제거하고 다른 숨김 요소는 보존 - hidden_elements = soup.find_all(attrs={ - 'style': re.compile(r'display\s*:\s*none', re.I) - }) + hidden_elements = soup.find_all( + attrs={"style": re.compile(r"display\s*:\s*none", re.I)} + ) for element in hidden_elements: element.decompose() - # 중요한 속성들을 더 포괄적으로 보존 + # 중요한 속성들을 보존 important_attributes = { - 'id', 'class', 'name', 'type', 'value', 'href', 'src', 'alt', 'title', - 'placeholder', 'role', 'aria-label', 'aria-describedby', 'aria-expanded', - 'onclick', 'onchange', 'onfocus', 'onblur', - 'disabled', 'readonly', 'required', 'checked', 'selected', 'hidden', - 'tabindex', 'contenteditable', # 이게 중요! 
- 'spellcheck', 'autocomplete', 'maxlength', 'minlength', - 'for', 'form', 'method', 'action', 'target' + "id", + "class", + "name", + "type", + "value", + "href", + "src", + "alt", + "title", + "placeholder", + "role", + "aria-label", + "aria-describedby", + "aria-expanded", + "onclick", + "onchange", + "onfocus", + "onblur", + "disabled", + "readonly", + "required", + "checked", + "selected", + "hidden", + "tabindex", + "contenteditable", + "spellcheck", + "autocomplete", + "maxlength", + "minlength", + "for", + "form", + "method", + "action", + "target", } - # 속성 제거를 더 보수적으로 수행 for tag in soup.find_all(True): attrs_to_remove = [] for attr_name in tag.attrs.keys(): # data-* 속성은 모두 보존 - if attr_name.startswith('data-'): + if attr_name.startswith("data-"): continue # aria-* 속성도 모두 보존 - if attr_name.startswith('aria-'): + if attr_name.startswith("aria-"): continue # on* 이벤트 속성들도 보존 - if attr_name.startswith('on'): + if attr_name.startswith("on"): continue # 중요 속성이 아니면 제거 if attr_name not in important_attributes: @@ -71,10 +100,18 @@ def preprocess_html(html_content): for attr_name in attrs_to_remove: del tag.attrs[attr_name] - # 빈 태그 제거를 더 신중하게 수행 + # 빈 태그 제거 interactive_tags = { - 'input', 'button', 'select', 'textarea', 'a', 'img', 'br', 'hr', - 'div', 'span' # div, span도 에디터 요소일 수 있으므로 보존 + "input", + "button", + "select", + "textarea", + "a", + "img", + "br", + "hr", + "div", + "span", } def remove_empty_tags_conservative(): @@ -85,16 +122,16 @@ def remove_empty_tags_conservative(): iteration += 1 for tag in soup.find_all(): - # 상호작용 가능한 태그는 건드리지 않음 + # 상호작용 가능한 태그는 보존 if tag.name in interactive_tags: continue # contenteditable 속성이 있으면 보존 - if tag.get('contenteditable'): + if tag.get("contenteditable"): continue - # data-* 속성이 있으면 보존 (React 컴포넌트일 가능성) - if any(attr.startswith('data-') for attr in tag.attrs.keys()): + # data-* 속성이 있으면 보존 + if any(attr.startswith("data-") for attr in tag.attrs.keys()): continue # 텍스트도 없고 자식 요소도 없으면 제거 @@ -104,16 +141,17 @@ def 
remove_empty_tags_conservative(): remove_empty_tags_conservative() - # 연속된 공백 정리 (더 보수적) + # 연속된 공백 정리 for text_node in soup.find_all(string=True): - if text_node.parent.name not in ['script', 'style']: - cleaned_text = re.sub(r'\s+', ' ', str(text_node)) + if text_node.parent.name not in ["script", "style"]: + cleaned_text = re.sub(r"\s+", " ", str(text_node)) if cleaned_text != str(text_node): text_node.replace_with(cleaned_text) html_list = _chunking_html(str(soup)) return html_list + def _chunking_html(html_content, chunk_size=50000): """ HTML을 지정된 크기로 분할하는 메서드 @@ -123,35 +161,41 @@ def _chunking_html(html_content, chunk_size=50000): """ chunks = [] for i in range(0, len(html_content), chunk_size): - chunks.append(html_content[i:i + chunk_size]) + chunks.append(html_content[i : i + chunk_size]) return chunks + def wait_for_tistory_editor_complete(driver, timeout=30): """ 티스토리 TinyMCE 에디터가 완전히 로드될 때까지 대기 """ from selenium.webdriver.support.ui import WebDriverWait - wait = WebDriverWait(driver, timeout) - print("🎯 티스토리 에디터 로딩 대기 중...") + wait = WebDriverWait(driver, timeout) - # 1단계: 페이지 기본 로딩 + # 페이지 기본 로딩 wait.until(lambda d: d.execute_script("return document.readyState") == "complete") - # 2단계: TinyMCE 라이브러리 로딩 + # TinyMCE 라이브러리 로딩 wait.until(lambda d: d.execute_script("return typeof tinymce !== 'undefined'")) - # 3단계: 에디터 인스턴스 초기화 - wait.until(lambda d: d.execute_script(""" + # 에디터 인스턴스 초기화 + wait.until( + lambda d: d.execute_script( + """ return tinymce.get('editor-tistory') && tinymce.get('editor-tistory').initialized - """)) + """ + ) + ) - # 4단계: iframe 준비 + # iframe 준비 wait.until(EC.presence_of_element_located((By.ID, "editor-tistory_ifr"))) - # 5단계: iframe 내부 document 준비 - wait.until(lambda d: d.execute_script(""" + # iframe 내부 document 준비 + wait.until( + lambda d: d.execute_script( + """ try { var editor = tinymce.get('editor-tistory'); var doc = editor.getDoc(); @@ -159,8 +203,8 @@ def wait_for_tistory_editor_complete(driver, timeout=30): } catch (e) 
{ return false; } - """)) + """ + ) + ) - print("✅ 티스토리 에디터 완전 로딩 완료!") return True - From 4cc59df94241d8435294f87dce6d65c415744fe9 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 10:52:04 +0900 Subject: [PATCH 38/57] =?UTF-8?q?feat:=20=EB=B8=94=EB=A1=9C=EA=B7=B8=20Rag?= =?UTF-8?q?=EA=B8=B0=EB=B0=98=20=EC=BD=98=ED=85=90=EC=B8=A0=20=EC=83=9D?= =?UTF-8?q?=EC=84=B1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/service/blog/blog_create_service.py | 298 ++++++++++++++ .../service/product_blog_posting_service.py | 387 ------------------ 2 files changed, 298 insertions(+), 387 deletions(-) create mode 100644 apps/pre-processing-service/app/service/blog/blog_create_service.py delete mode 100644 apps/pre-processing-service/app/service/product_blog_posting_service.py diff --git a/apps/pre-processing-service/app/service/blog/blog_create_service.py b/apps/pre-processing-service/app/service/blog/blog_create_service.py new file mode 100644 index 00000000..b0e06907 --- /dev/null +++ b/apps/pre-processing-service/app/service/blog/blog_create_service.py @@ -0,0 +1,298 @@ +import json +import logging +import os +from datetime import datetime +from typing import Dict, List, Optional, Any + +from openai import OpenAI +from dotenv import load_dotenv + +from app.schemas import RequestBlogCreate +from app.errors.BlogPostingException import * + +# 환경변수 로드 +load_dotenv(".env.dev") + +client = OpenAI() + + +class BlogContentService: + """RAG를 사용한 블로그 콘텐츠 생성 전용 서비스""" + + def __init__(self): + # OpenAI API 키 설정 + self.openai_api_key = os.getenv("OPENAI_API_KEY") + if not self.openai_api_key: + raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.") + + client.api_key = self.openai_api_key + logging.basicConfig(level=logging.INFO) + self.logger = logging.getLogger(__name__) + + def generate_blog_content(self, request: RequestBlogCreate) -> Dict[str, Any]: + """ + 요청 데이터를 기반으로 블로그 콘텐츠 생성 + + Args: + request: 
RequestBlogCreate 객체 + + Returns: + Dict: {"title": str, "content": str, "tags": List[str]} 형태의 결과 + """ + try: + # 1. 콘텐츠 정보 정리 + content_context = self._prepare_content_context(request) + + # 2. 프롬프트 생성 + prompt = self._create_content_prompt(content_context, request) + + # 3. GPT를 통한 콘텐츠 생성 + generated_content = self._generate_with_openai(prompt) + + # 4. 콘텐츠 파싱 및 구조화 + return self._parse_generated_content(generated_content, request) + + except Exception as e: + self.logger.error(f"콘텐츠 생성 실패: {e}") + return self._create_fallback_content(request) + + def _prepare_content_context(self, request: RequestBlogCreate) -> str: + """요청 데이터를 콘텐츠 생성용 컨텍스트로 변환""" + context_parts = [] + + # 키워드 정보 추가 + if request.keyword: + context_parts.append(f"주요 키워드: {request.keyword}") + + # 상품 정보 추가 + if request.product_info: + context_parts.append("\n상품 정보:") + + # 상품 기본 정보 + if request.product_info.get("title"): + context_parts.append(f"- 상품명: {request.product_info['title']}") + + if request.product_info.get("price"): + context_parts.append(f"- 가격: {request.product_info['price']:,}원") + + if request.product_info.get("rating"): + context_parts.append(f"- 평점: {request.product_info['rating']}/5.0") + + # 상품 상세 정보 + if request.product_info.get("description"): + context_parts.append(f"- 설명: {request.product_info['description']}") + + # 상품 사양 (material_info 등) + if request.product_info.get("material_info"): + context_parts.append("- 주요 사양:") + specs = request.product_info["material_info"] + if isinstance(specs, dict): + for key, value in specs.items(): + context_parts.append(f" * {key}: {value}") + + # 상품 옵션 + if request.product_info.get("options"): + options = request.product_info["options"] + context_parts.append(f"- 구매 옵션 ({len(options)}개):") + for i, option in enumerate(options[:5], 1): # 최대 5개만 + if isinstance(option, dict): + option_name = option.get("name", f"옵션 {i}") + context_parts.append(f" {i}. {option_name}") + else: + context_parts.append(f" {i}. 
{option}") + + # 구매 링크 + if request.product_info.get("url") or request.product_info.get("product_url"): + url = request.product_info.get("url") or request.product_info.get("product_url") + context_parts.append(f"- 구매 링크: {url}") + + return "\n".join(context_parts) if context_parts else "키워드 기반 콘텐츠 생성" + + def _create_content_prompt(self, context: str, request: RequestBlogCreate) -> str: + """콘텐츠 생성용 프롬프트 생성""" + + # 기본 키워드가 없으면 상품 제목에서 추출 + main_keyword = request.keyword + if not main_keyword and request.product_info and request.product_info.get("title"): + main_keyword = request.product_info["title"] + + prompt = f""" +다음 정보를 바탕으로 매력적인 블로그 포스트를 작성해주세요. + +정보: +{context} + +작성 가이드라인: +- 스타일: 친근하면서도 신뢰할 수 있는, 정보 제공 중심 +- 길이: 1200자 내외의 적당한 길이 +- 톤: 독자의 관심을 끄는 자연스러운 어조 + +작성 요구사항: +1. SEO 친화적이고 클릭하고 싶은 매력적인 제목 +2. 독자의 관심을 끄는 도입부 +3. 핵심 특징과 장점을 구체적으로 설명 +4. 실제 사용 시나리오나 활용 팁 +5. 구매 결정에 도움이 되는 정보 + +⚠️ 주의: +- 절대로 마지막에 'HTML 구조는…' 같은 자기 평가 문장을 추가하지 마세요. +- 출력 시 ```나 ```html 같은 코드 블록 구문을 포함하지 마세요. +- 오직 HTML 태그만 사용하여 구조화된 콘텐츠를 작성해주세요. +(예:

      ,

      ,

      ,

        ,
      • 등) +""" + + return prompt + + def _generate_with_openai(self, prompt: str) -> str: + """OpenAI API를 통한 콘텐츠 생성""" + try: + response = client.chat.completions.create( + model="gpt-4o-mini", + messages=[ + { + "role": "system", + "content": "당신은 전문적인 블로그 콘텐츠 작성자입니다. 상품 리뷰와 정보성 콘텐츠를 매력적이고 SEO 친화적으로 작성합니다.", + }, + {"role": "user", "content": prompt}, + ], + temperature=0.7, + max_tokens=2000, + ) + + return response.choices[0].message.content + + except Exception as e: + self.logger.error(f"OpenAI API 호출 실패: {e}") + raise + + def _parse_generated_content(self, content: str, request: RequestBlogCreate) -> Dict[str, Any]: + """생성된 콘텐츠를 파싱하여 구조화""" + + # 제목 추출 (첫 번째 h1이나 강조된 줄) + lines = content.strip().split("\n") + title = "블로그 포스트" # 기본값 + + for line in lines[:10]: # 처음 10줄에서 제목 찾기 + clean_line = ( + line.strip() + .replace("#", "") + .replace("

        ", "") + .replace("

        ", "") + .replace("

        ", "") + .replace("

        ", "") + ) + if clean_line and len(clean_line) > 5 and len(clean_line) < 100: + title = clean_line + break + + # 키워드가 있으면 제목에 없을 경우 기본 제목 생성 + if request.keyword and request.keyword not in title: + if request.product_info and request.product_info.get("title"): + title = f"{request.product_info['title']} - {request.keyword} 완벽 가이드" + else: + title = f"{request.keyword} - 완벽 가이드" + + # 태그 생성 + tags = self._generate_tags(request) + + return { + "title": title, + "content": content, + "tags": tags + } + + def _generate_tags(self, request: RequestBlogCreate) -> List[str]: + """요청 정보 기반 태그 생성""" + tags = [] + + # 키워드 추가 + if request.keyword: + tags.append(request.keyword) + + # 상품 정보에서 태그 추출 + if request.product_info: + # 상품명에서 키워드 추출 + if request.product_info.get("title"): + title = request.product_info["title"].lower() + + # 일반적인 제품 카테고리 태그 + if any(word in title for word in ["iphone", "아이폰", "phone"]): + tags.extend(["아이폰", "스마트폰"]) + if any(word in title for word in ["필름", "보호", "강화"]): + tags.extend(["보호필름", "강화필름"]) + if any(word in title for word in ["케이스", "커버"]): + tags.extend(["폰케이스", "액세서리"]) + if any(word in title for word in ["노트북", "laptop"]): + tags.extend(["노트북", "컴퓨터"]) + if any(word in title for word in ["마우스", "키보드"]): + tags.extend(["컴퓨터용품", "PC액세서리"]) + + # 재료/사양 정보에서 태그 생성 + if request.product_info.get("material_info"): + material_info = request.product_info["material_info"] + if isinstance(material_info, dict): + for key, value in material_info.items(): + if value and len(str(value).strip()) <= 20: + clean_value = str(value).strip() + if clean_value not in tags: + tags.append(clean_value) + + # 기본 태그 추가 + if not tags: + tags = ["상품정보", "리뷰"] + + # 중복 제거 및 개수 제한 + unique_tags = [] + for tag in tags: + if tag not in unique_tags and len(unique_tags) < 10: + unique_tags.append(tag) + + return unique_tags + + def _create_fallback_content(self, request: RequestBlogCreate) -> Dict[str, Any]: + """콘텐츠 생성 실패 시 대안 콘텐츠 생성""" + + if 
request.product_info and request.product_info.get("title"): + title = f"{request.product_info['title']} - 상품 정보 및 구매 가이드" + product_name = request.product_info["title"] + elif request.keyword: + title = f"{request.keyword} - 완벽 가이드" + product_name = request.keyword + else: + title = "상품 정보 및 구매 가이드" + product_name = "상품" + + content = f""" +

        {title}

        + +

        상품 소개

        +

        {product_name}에 대한 상세한 정보를 소개합니다.

        + +

        주요 특징

        +
          +
        • 고품질의 제품으로 신뢰할 수 있는 브랜드입니다
        • +
        • 합리적인 가격으로 가성비가 뛰어납니다
        • +
        • 사용자 친화적인 디자인과 기능을 제공합니다
        • +
        +""" + + if request.product_info: + if request.product_info.get("price"): + content += f"

        가격 정보

        \n

        판매가: {request.product_info['price']:,}원

        \n" + + if request.product_info.get("material_info"): + content += "

        상품 사양

        \n
          \n" + for key, value in request.product_info["material_info"].items(): + content += f"
        • {key}: {value}
        • \n" + content += "
        \n" + + content += """ +

        구매 안내

        +

        신중한 검토를 통해 만족스러운 구매 결정을 내리시기 바랍니다.

        +""" + + return { + "title": title, + "content": content, + "tags": self._generate_tags(request) + } \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/product_blog_posting_service.py b/apps/pre-processing-service/app/service/product_blog_posting_service.py deleted file mode 100644 index 129c4666..00000000 --- a/apps/pre-processing-service/app/service/product_blog_posting_service.py +++ /dev/null @@ -1,387 +0,0 @@ -import json -import logging -import os -from datetime import datetime -from typing import Dict, List, Optional, Any -from dataclasses import dataclass -from enum import Enum - -from openai import OpenAI -from dotenv import load_dotenv - -from app.service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter -from app.errors.BlogPostingException import * - -# 환경변수 로드 -load_dotenv(".env.dev") - -client = OpenAI() - - -class PostingStatus(Enum): - PENDING = "pending" - PROCESSING = "processing" - SUCCESS = "success" - FAILED = "failed" - RETRY = "retry" - - -@dataclass -class ProductData: - """크롤링된 상품 데이터 모델""" - - tag: str - product_url: str - title: str - price: int - rating: float - options: List[Dict[str, Any]] - material_info: Dict[str, str] - product_images: List[str] - crawled_at: str - - @classmethod - def from_dict(cls, data: Dict) -> "ProductData": - """딕셔너리에서 ProductData 객체 생성""" - product_detail = data.get("product_detail", {}) - return cls( - tag=data.get("tag", ""), - product_url=product_detail.get("url", ""), - title=product_detail.get("title", ""), - price=product_detail.get("price", 0), - rating=product_detail.get("rating", 0.0), - options=product_detail.get("options", []), - material_info=product_detail.get("material_info", {}), - product_images=product_detail.get("product_images", []), - crawled_at=data.get("crawled_at", ""), - ) - - -@dataclass -class BlogPostContent: - """생성된 블로그 포스트 콘텐츠""" - - title: str - content: str - tags: List[str] - - -@dataclass -class BlogContentRequest: - """블로그 콘텐츠 
생성 요청""" - - content_style: str = "informative" # "informative", "promotional", "review" - target_keywords: List[str] = None - include_pricing: bool = True - include_specifications: bool = True - content_length: str = "medium" # "short", "medium", "long" - - -class ProductContentGenerator: - """GPT를 활용한 상품 블로그 콘텐츠 생성""" - - def __init__(self): - # 환경변수에서 OpenAI API 키 로드 - self.openai_api_key = os.getenv("OPENAI_API_KEY") - if not self.openai_api_key: - raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.") - - client.api_key = self.openai_api_key - - def generate_blog_content( - self, product_data: ProductData, request: BlogContentRequest - ) -> BlogPostContent: - """상품 데이터를 기반으로 블로그 콘텐츠 생성""" - - # 1. 상품 정보 정리 - product_info = self._format_product_info(product_data, request) - - # 2. 프롬프트 생성 - prompt = self._create_blog_prompt(product_info, request) - - # 3. GPT를 통한 콘텐츠 생성 - try: - - response = client.chat.completions.create( - model="gpt-4o-mini", - messages=[ - { - "role": "system", - "content": "당신은 전문적인 블로그 콘텐츠 작성자입니다. 상품 리뷰와 정보성 콘텐츠를 매력적이고 SEO 친화적으로 작성합니다.", - }, - {"role": "user", "content": prompt}, - ], - temperature=0.7, - max_tokens=2000, - ) - - generated_content = response.choices[0].message.content - - # 4. 
콘텐츠 파싱 및 구조화 - return self._parse_generated_content( - generated_content, product_data, request - ) - - except Exception as e: - logging.error(f"콘텐츠 생성 실패: {e}") - return self._create_fallback_content(product_data, request) - - def _format_product_info( - self, product_data: ProductData, request: BlogContentRequest - ) -> str: - """상품 정보를 텍스트로 포맷팅""" - info_parts = [ - f"상품명: {product_data.title}", - ] - - # 가격 정보 추가 - if request.include_pricing and product_data.price: - info_parts.append(f"가격: {product_data.price:,}원") - - # 평점 정보 추가 - if product_data.rating: - info_parts.append(f"평점: {product_data.rating}/5.0") - - # 사양 정보 추가 - if request.include_specifications and product_data.material_info: - info_parts.append("\n상품 사양:") - for key, value in product_data.material_info.items(): - info_parts.append(f"- {key}: {value}") - - # 옵션 정보 추가 - if product_data.options: - info_parts.append(f"\n구매 옵션 ({len(product_data.options)}개):") - for i, option in enumerate(product_data.options[:5], 1): # 처음 5개만 - info_parts.append(f"{i}. 
{option.get('name', 'N/A')}") - - # 구매 링크 - if product_data.product_url: - info_parts.append(f"\n구매 링크: {product_data.product_url}") - - return "\n".join(info_parts) - - def _create_blog_prompt( - self, product_info: str, request: BlogContentRequest - ) -> str: - """블로그 작성용 프롬프트 생성""" - - # 스타일별 가이드라인 - style_guidelines = { - "informative": "객관적이고 상세한 정보 제공 중심으로, 독자가 제품을 이해할 수 있도록 전문적으로 작성", - "promotional": "제품의 장점과 매력을 강조하며, 구매 의욕을 자극할 수 있도록 매력적으로 작성", - "review": "실제 사용 경험을 바탕으로 한 솔직한 평가와 추천 중심으로 작성", - } - - # 길이별 가이드라인 - length_guidelines = { - "short": "800자 내외의 간결한 내용", - "medium": "1200자 내외의 적당한 길이", - "long": "1500자 이상의 상세한 내용", - } - - style_guide = style_guidelines.get( - request.content_style, style_guidelines["informative"] - ) - length_guide = length_guidelines.get( - request.content_length, length_guidelines["medium"] - ) - - # 키워드 정보 - keywords_text = "" - if request.target_keywords: - keywords_text = f"\n포함할 키워드: {', '.join(request.target_keywords)}" - - prompt = f""" -다음 상품 정보를 바탕으로 매력적인 블로그 포스트를 작성해주세요. - -상품 정보: -{product_info} - -작성 가이드라인: -- 스타일: {style_guide} -- 길이: {length_guide} -- 톤: 친근하면서도 신뢰할 수 있는, 정보 제공 중심{keywords_text} - -작성 요구사항: -1. SEO 친화적이고 클릭하고 싶은 매력적인 제목 -2. 독자의 관심을 끄는 도입부 -3. 상품의 핵심 특징과 장점을 구체적으로 설명 -4. 실제 사용 시나리오나 활용 팁 -5. 구매 결정에 도움이 되는 정보 - -⚠️ 주의: -- 절대로 마지막에 '이 HTML 구조는…' 같은 자기 평가 문장을 추가하지 마세요. -- 출력 시 ```나 ```html 같은 코드 블록 구문을 포함하지 마세요. -- 오직 HTML 태그만 사용하여 구조화된 콘텐츠를 작성해주세요. -(예:

        ,

        ,

        ,

          ,
        • 등) -""" - - return prompt - - def _parse_generated_content( - self, content: str, product_data: ProductData, request: BlogContentRequest - ) -> BlogPostContent: - """생성된 콘텐츠를 파싱하여 구조화""" - - # 제목 추출 (첫 번째 h1이나 강조된 줄) - lines = content.strip().split("\n") - title = product_data.title # 기본값 - - for line in lines[:10]: # 처음 10줄에서 제목 찾기 - clean_line = ( - line.strip().replace("#", "").replace("

          ", "").replace("

          ", "") - ) - if ( - clean_line - and len(clean_line) > 5 - and ("제목" in line or "

          " in line or line.startswith("#")) - ): - title = clean_line - break - elif clean_line and len(clean_line) > 10 and len(clean_line) < 100: - # 적당한 길이의 첫 번째 줄을 제목으로 - title = clean_line - break - - # 태그 생성 - tags = self._generate_tags_from_product(product_data, request) - - return BlogPostContent(title=title, content=content, tags=tags) - - def _generate_tags_from_product( - self, product_data: ProductData, request: BlogContentRequest - ) -> List[str]: - """상품 정보 기반 태그 생성""" - tags = [] - - # 사용자 지정 키워드가 있으면 우선 추가 - if request.target_keywords: - tags.extend(request.target_keywords[:5]) - - # 기본 태그 추가 - if product_data.tag: - tags.append(product_data.tag) - - # 제품 타입 추론해서 태그 추가 - title_lower = product_data.title.lower() - if any(word in title_lower for word in ["iphone", "아이폰", "phone"]): - tags.extend(["아이폰", "스마트폰"]) - if any(word in title_lower for word in ["필름", "보호", "강화"]): - tags.extend(["보호필름", "강화필름"]) - if any(word in title_lower for word in ["케이스", "커버"]): - tags.extend(["폰케이스", "액세서리"]) - - # 재료 정보에서 태그 생성 - if product_data.material_info: - for key, value in product_data.material_info.items(): - if value and len(value.strip()) <= 20: # 너무 긴 값은 제외 - clean_value = value.strip() - if clean_value not in tags: - tags.append(clean_value) - - # 중복 제거 및 개수 제한 - unique_tags = [] - for tag in tags: - if tag not in unique_tags and len(unique_tags) < 10: - unique_tags.append(tag) - - return unique_tags - - def _create_fallback_content( - self, product_data: ProductData, request: BlogContentRequest - ) -> BlogPostContent: - """콘텐츠 생성 실패 시 대안 콘텐츠 생성""" - title = f"{product_data.title} - 상품 정보 및 구매 가이드" - - content = f""" -

          {product_data.title}

          - -

          상품 소개

          -

          {product_data.title}에 대한 상세한 정보를 소개합니다.

          - -

          가격 정보

          -

          판매가: {product_data.price:,}원

          -""" - - if product_data.material_info: - content += "

          상품 사양

          \n
            \n" - for key, value in product_data.material_info.items(): - content += f"
          • {key}: {value}
          • \n" - content += "
          \n" - - if product_data.options: - content += f"

          구매 옵션 ({len(product_data.options)}가지)

          \n
            \n" - for option in product_data.options[:5]: - content += f"
          • {option.get('name', 'N/A')}
          • \n" - content += "
          \n" - - content += f""" -

          구매 안내

          -

          상품 구매는 여기에서 가능합니다.

          -""" - - return BlogPostContent( - title=title, - content=content, - tags=[product_data.tag] if product_data.tag else ["상품정보"], - ) - - -class ProductBlogPostingService: - """상품 데이터를 Blogger에 포스팅하는 메인 서비스""" - - def __init__(self): - self.content_generator = ProductContentGenerator() - self.blogger_service = BloggerBlogPostAdapter() - - def post_product_to_blogger( - self, product_data: ProductData, request: BlogContentRequest - ) -> dict: - """상품 데이터를 Blogger에 포스팅""" - try: - # 1. GPT를 통한 콘텐츠 생성 - blog_content = self.content_generator.generate_blog_content( - product_data, request - ) - - # 2. Blogger에 포스팅 - self.blogger_service.post_content( - title=blog_content.title, - content=blog_content.content, - tags=blog_content.tags, - ) - - # 3. 성공 결과 반환 - return { - "status": "success", - "platform": "blogger", - "title": blog_content.title, - "tags": blog_content.tags, - "posted_at": datetime.now().isoformat(), - "product_tag": product_data.tag, - } - - except Exception as e: - logging.error(f"Blogger 포스팅 실패: {e}") - # ProductData 객체 기준으로 처리 - return { - "status": "failed", - "error": str(e), - "platform": "blogger", - "attempted_at": datetime.now().isoformat(), - "product_tag": getattr(product_data, "tag", "unknown"), - } - - # def batch_post_products(self, products_data: List[Dict], request: BlogContentRequest) -> List[Dict[str, Any]]: - # """여러 상품을 일괄 포스팅""" - # results = [] - # - # for product_data in products_data: - # result = self.post_product_to_blogger(product_data, request) - # results.append(result) - # - # # API 호출 제한을 고려한 딜레이 - # import time - # time.sleep(3) # 3초 대기 - # - # return results From 39ce9518069be2dfd2a4fdfc01677c3d6a8d82f6 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 11:12:54 +0900 Subject: [PATCH 39/57] =?UTF-8?q?feat:=20=EB=B8=94=EB=A1=9C=EA=B7=B8=20Rag?= =?UTF-8?q?=EA=B8=B0=EB=B0=98=20=EC=BD=98=ED=85=90=EC=B8=A0=20=EC=83=9D?= =?UTF-8?q?=EC=84=B1=20=ED=85=8C=EC=8A=A4=ED=8A=B8=20=EC=BD=94=EB=93=9C=20?= 
=?UTF-8?q?=EC=9E=91=EC=84=B1=20=EC=99=84=EB=A3=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/service/blog/blog_create_service.py | 50 +++++++++-- .../app/test/test_blog_create_service.py | 84 ++++++++++++++++++ .../test/test_product_blog_posting_service.py | 87 ------------------- 3 files changed, 128 insertions(+), 93 deletions(-) create mode 100644 apps/pre-processing-service/app/test/test_blog_create_service.py delete mode 100644 apps/pre-processing-service/app/test/test_product_blog_posting_service.py diff --git a/apps/pre-processing-service/app/service/blog/blog_create_service.py b/apps/pre-processing-service/app/service/blog/blog_create_service.py index b0e06907..b2d61c21 100644 --- a/apps/pre-processing-service/app/service/blog/blog_create_service.py +++ b/apps/pre-processing-service/app/service/blog/blog_create_service.py @@ -7,14 +7,12 @@ from openai import OpenAI from dotenv import load_dotenv -from app.schemas import RequestBlogCreate +from app.model.schemas import RequestBlogCreate from app.errors.BlogPostingException import * # 환경변수 로드 load_dotenv(".env.dev") -client = OpenAI() - class BlogContentService: """RAG를 사용한 블로그 콘텐츠 생성 전용 서비스""" @@ -25,7 +23,8 @@ def __init__(self): if not self.openai_api_key: raise ValueError("OPENAI_API_KEY가 .env.dev 파일에 설정되지 않았습니다.") - client.api_key = self.openai_api_key + # 인스턴스 레벨에서 클라이언트 생성 + self.client = OpenAI(api_key=self.openai_api_key) logging.basicConfig(level=logging.INFO) self.logger = logging.getLogger(__name__) @@ -146,7 +145,7 @@ def _create_content_prompt(self, context: str, request: RequestBlogCreate) -> st def _generate_with_openai(self, prompt: str) -> str: """OpenAI API를 통한 콘텐츠 생성""" try: - response = client.chat.completions.create( + response = self.client.chat.completions.create( model="gpt-4o-mini", messages=[ { @@ -295,4 +294,43 @@ def _create_fallback_content(self, request: RequestBlogCreate) -> Dict[str, Any] "title": title, "content": 
content, "tags": self._generate_tags(request) - } \ No newline at end of file + } + +# if __name__ == '__main__': +# # 테스트용 요청 데이터 +# test_request = RequestBlogCreate( +# keyword="아이폰 케이스", +# product_info={ +# "title": "아이폰 15 프로 투명 케이스", +# "price": 29900, +# "rating": 4.8, +# "description": "9H 강화 보호 기능을 제공하는 투명 케이스", +# "material_info": { +# "소재": "TPU + PC", +# "두께": "1.2mm", +# "색상": "투명", +# "호환성": "아이폰 15 Pro" +# }, +# "options": [ +# {"name": "투명"}, +# {"name": "반투명"}, +# {"name": "블랙"} +# ], +# "url": "https://example.com/iphone-case" +# } +# ) +# +# # 서비스 실행 +# service = BlogContentService() +# print("=== 블로그 콘텐츠 생성 테스트 ===") +# print(f"키워드: {test_request.keyword}") +# print(f"상품: {test_request.product_info['title']}") +# print("\n--- 생성 시작 ---") +# +# result = service.generate_blog_content(test_request) +# +# print(f"\n=== 생성 결과 ===") +# print(f"제목: {result['title']}") +# print(f"\n태그: {', '.join(result['tags'])}") +# print(f"\n내용:\n{result['content']}") +# print(f"\n글자수: {len(result['content'])}자") \ No newline at end of file diff --git a/apps/pre-processing-service/app/test/test_blog_create_service.py b/apps/pre-processing-service/app/test/test_blog_create_service.py new file mode 100644 index 00000000..0c6d447c --- /dev/null +++ b/apps/pre-processing-service/app/test/test_blog_create_service.py @@ -0,0 +1,84 @@ +import unittest +from unittest.mock import patch, MagicMock + +from app.service.blog.blog_create_service import BlogContentService +from app.model.schemas import RequestBlogCreate + + +class TestBlogContentGeneration(unittest.TestCase): + """블로그 콘텐츠 생성 핵심 로직 테스트""" + + @patch.dict('os.environ', {'OPENAI_API_KEY': 'test-key'}) + @patch('app.service.blog.blog_create_service.OpenAI') + def setUp(self, mock_openai_class): + """테스트 설정 - OpenAI Mock 적용""" + # Mock OpenAI 클라이언트 설정 + self.mock_client = MagicMock() + mock_openai_class.return_value = self.mock_client + + # 서비스 인스턴스 생성 + self.service = BlogContentService() + + def 
test_generate_blog_content_success(self): + """정상적인 콘텐츠 생성 테스트""" + # Mock 응답 설정 + mock_choice = MagicMock() + mock_choice.message.content = """

          아이폰 15 케이스 완벽 가이드

          +

          제품 소개

          +

          이 케이스는 뛰어난 보호 성능을 제공합니다.

          """ + + mock_response = MagicMock() + mock_response.choices = [mock_choice] + + self.mock_client.chat.completions.create.return_value = mock_response + + # 테스트 요청 + request = RequestBlogCreate( + keyword="아이폰 케이스", + product_info={ + "title": "아이폰 15 투명 케이스", + "price": 25000 + } + ) + + # 실행 + result = self.service.generate_blog_content(request) + + # 검증 + self.assertIn("title", result) + self.assertIn("content", result) + self.assertIn("tags", result) + # 실제 파싱 로직에 따른 제목 검증 (키워드가 제목에 포함되지 않아 기본 제목 생성됨) + self.assertEqual(result["title"], "아이폰 15 투명 케이스 - 아이폰 케이스 완벽 가이드") + self.assertIn("

          ", result["content"]) + self.assertIn("아이폰 케이스", result["tags"]) + + def test_generate_blog_content_api_failure(self): + """API 실패 시 폴백 콘텐츠 생성 테스트""" + # API 실패 시뮬레이션 + self.mock_client.chat.completions.create.side_effect = Exception("API Error") + + request = RequestBlogCreate(keyword="테스트 키워드") + + # 실행 + result = self.service.generate_blog_content(request) + + # 폴백 콘텐츠 검증 + self.assertIn("title", result) + self.assertIn("content", result) + self.assertIn("tags", result) + self.assertEqual(result["title"], "테스트 키워드 - 완벽 가이드") + + def test_generate_blog_content_minimal_input(self): + """최소한의 입력으로 콘텐츠 생성 테스트""" + # API 실패 시뮬레이션 + self.mock_client.chat.completions.create.side_effect = Exception("API Error") + + request = RequestBlogCreate() + + result = self.service.generate_blog_content(request) + + # 기본 콘텐츠 생성 확인 + self.assertEqual(result["title"], "상품 정보 및 구매 가이드") + self.assertIn("

          ", result["content"]) + self.assertEqual(result["tags"], ["상품정보", "리뷰"]) \ No newline at end of file diff --git a/apps/pre-processing-service/app/test/test_product_blog_posting_service.py b/apps/pre-processing-service/app/test/test_product_blog_posting_service.py deleted file mode 100644 index c5b1efde..00000000 --- a/apps/pre-processing-service/app/test/test_product_blog_posting_service.py +++ /dev/null @@ -1,87 +0,0 @@ -import pytest -from app.service.product_blog_posting_service import ( - ProductBlogPostingService, - BlogContentRequest, - ProductData, -) - -# 샘플 데이터 -sample_product_data = { - "tag": "test001", - "product_url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", - "status": "success", - "product_detail": { - "url": "https://ssadagu.kr/shop/view.php?platform=1688&num_iid=902500949447", - "title": "코닝 적용 가능한 애플 13 강화 필름 iphone16/15promax 휴대 전화 필름 애플 11 안티-peep 및 먼지없는 빈", - "price": 430, - "rating": 5.0, - "options": [ - {"name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨초투명]", "stock": 0}, - { - "name": "먼지 없는 창고 2차 필름 [코닝글라스 방폭丨훔쳐보기 방지]", - "stock": 0, - }, - ], - "material_info": { - "상표": "다른", - "재료": "강화 유리", - "필름 종류": "전막", - "크기": "애플 16프로맥스( 6.9inch )", - "적용 모델": "iPhone13 Pro Max", - }, - "product_images": [], - }, - "crawled_at": "2025-09-16 11:49:24", -} - - -@pytest.fixture -def blog_service(): - return ProductBlogPostingService() - - -def test_generate_blog_content(blog_service): - """GPT를 통한 블로그 콘텐츠 생성 테스트""" - request = BlogContentRequest( - content_style="informative", - target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], - include_pricing=True, - content_length="medium", - ) - - product_obj = ProductData.from_dict(sample_product_data) - - # 순수 콘텐츠 생성만 테스트 - blog_content = blog_service.content_generator.generate_blog_content( - product_obj, request - ) - - assert blog_content.title - assert "

          " in blog_content.content - assert len(blog_content.tags) > 0 - - -def test_post_product_to_blogger(blog_service, monkeypatch): - """Blogger 포스팅 테스트 (실제 API 호출을 막고 mock)""" - - class MockBloggerAdapter: - def post_content(self, title, content, tags): - return {"mock": True} - - monkeypatch.setattr(blog_service, "blogger_service", MockBloggerAdapter()) - - request = BlogContentRequest( - content_style="informative", - target_keywords=["아이폰", "강화필름", "보호필름", "스마트폰액세서리"], - include_pricing=True, - content_length="medium", - ) - - product_obj = ProductData.from_dict(sample_product_data) - - result = blog_service.post_product_to_blogger(product_obj, request) - - assert result["status"] == "success" - assert result["platform"] == "blogger" - assert "title" in result - assert "tags" in result From b7bf20bb3c2e42256c92ed4a78f6bfe74dd9f224 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 11:28:22 +0900 Subject: [PATCH 40/57] chore: poetry run black . --- .../app/service/blog/blog_create_service.py | 33 ++++++++++++------- .../app/test/test_blog_create_service.py | 15 ++++----- 2 files changed, 28 insertions(+), 20 deletions(-) diff --git a/apps/pre-processing-service/app/service/blog/blog_create_service.py b/apps/pre-processing-service/app/service/blog/blog_create_service.py index b2d61c21..29ce12b7 100644 --- a/apps/pre-processing-service/app/service/blog/blog_create_service.py +++ b/apps/pre-processing-service/app/service/blog/blog_create_service.py @@ -101,8 +101,12 @@ def _prepare_content_context(self, request: RequestBlogCreate) -> str: context_parts.append(f" {i}. 
{option}") # 구매 링크 - if request.product_info.get("url") or request.product_info.get("product_url"): - url = request.product_info.get("url") or request.product_info.get("product_url") + if request.product_info.get("url") or request.product_info.get( + "product_url" + ): + url = request.product_info.get("url") or request.product_info.get( + "product_url" + ) context_parts.append(f"- 구매 링크: {url}") return "\n".join(context_parts) if context_parts else "키워드 기반 콘텐츠 생성" @@ -112,7 +116,11 @@ def _create_content_prompt(self, context: str, request: RequestBlogCreate) -> st # 기본 키워드가 없으면 상품 제목에서 추출 main_keyword = request.keyword - if not main_keyword and request.product_info and request.product_info.get("title"): + if ( + not main_keyword + and request.product_info + and request.product_info.get("title") + ): main_keyword = request.product_info["title"] prompt = f""" @@ -164,7 +172,9 @@ def _generate_with_openai(self, prompt: str) -> str: self.logger.error(f"OpenAI API 호출 실패: {e}") raise - def _parse_generated_content(self, content: str, request: RequestBlogCreate) -> Dict[str, Any]: + def _parse_generated_content( + self, content: str, request: RequestBlogCreate + ) -> Dict[str, Any]: """생성된 콘텐츠를 파싱하여 구조화""" # 제목 추출 (첫 번째 h1이나 강조된 줄) @@ -187,18 +197,16 @@ def _parse_generated_content(self, content: str, request: RequestBlogCreate) -> # 키워드가 있으면 제목에 없을 경우 기본 제목 생성 if request.keyword and request.keyword not in title: if request.product_info and request.product_info.get("title"): - title = f"{request.product_info['title']} - {request.keyword} 완벽 가이드" + title = ( + f"{request.product_info['title']} - {request.keyword} 완벽 가이드" + ) else: title = f"{request.keyword} - 완벽 가이드" # 태그 생성 tags = self._generate_tags(request) - return { - "title": title, - "content": content, - "tags": tags - } + return {"title": title, "content": content, "tags": tags} def _generate_tags(self, request: RequestBlogCreate) -> List[str]: """요청 정보 기반 태그 생성""" @@ -293,9 +301,10 @@ def 
_create_fallback_content(self, request: RequestBlogCreate) -> Dict[str, Any] return { "title": title, "content": content, - "tags": self._generate_tags(request) + "tags": self._generate_tags(request), } + # if __name__ == '__main__': # # 테스트용 요청 데이터 # test_request = RequestBlogCreate( @@ -333,4 +342,4 @@ def _create_fallback_content(self, request: RequestBlogCreate) -> Dict[str, Any] # print(f"제목: {result['title']}") # print(f"\n태그: {', '.join(result['tags'])}") # print(f"\n내용:\n{result['content']}") -# print(f"\n글자수: {len(result['content'])}자") \ No newline at end of file +# print(f"\n글자수: {len(result['content'])}자") diff --git a/apps/pre-processing-service/app/test/test_blog_create_service.py b/apps/pre-processing-service/app/test/test_blog_create_service.py index 0c6d447c..d32e4e9e 100644 --- a/apps/pre-processing-service/app/test/test_blog_create_service.py +++ b/apps/pre-processing-service/app/test/test_blog_create_service.py @@ -8,8 +8,8 @@ class TestBlogContentGeneration(unittest.TestCase): """블로그 콘텐츠 생성 핵심 로직 테스트""" - @patch.dict('os.environ', {'OPENAI_API_KEY': 'test-key'}) - @patch('app.service.blog.blog_create_service.OpenAI') + @patch.dict("os.environ", {"OPENAI_API_KEY": "test-key"}) + @patch("app.service.blog.blog_create_service.OpenAI") def setUp(self, mock_openai_class): """테스트 설정 - OpenAI Mock 적용""" # Mock OpenAI 클라이언트 설정 @@ -35,10 +35,7 @@ def test_generate_blog_content_success(self): # 테스트 요청 request = RequestBlogCreate( keyword="아이폰 케이스", - product_info={ - "title": "아이폰 15 투명 케이스", - "price": 25000 - } + product_info={"title": "아이폰 15 투명 케이스", "price": 25000}, ) # 실행 @@ -49,7 +46,9 @@ def test_generate_blog_content_success(self): self.assertIn("content", result) self.assertIn("tags", result) # 실제 파싱 로직에 따른 제목 검증 (키워드가 제목에 포함되지 않아 기본 제목 생성됨) - self.assertEqual(result["title"], "아이폰 15 투명 케이스 - 아이폰 케이스 완벽 가이드") + self.assertEqual( + result["title"], "아이폰 15 투명 케이스 - 아이폰 케이스 완벽 가이드" + ) self.assertIn("

          ", result["content"]) self.assertIn("아이폰 케이스", result["tags"]) @@ -81,4 +80,4 @@ def test_generate_blog_content_minimal_input(self): # 기본 콘텐츠 생성 확인 self.assertEqual(result["title"], "상품 정보 및 구매 가이드") self.assertIn("

          ", result["content"]) - self.assertEqual(result["tags"], ["상품정보", "리뷰"]) \ No newline at end of file + self.assertEqual(result["tags"], ["상품정보", "리뷰"]) From 987692d3dbbca8fcbcc35635d536a75eb3ee495f Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 11:42:25 +0900 Subject: [PATCH 41/57] chore: poetry run black . --- apps/pre-processing-service/app/utils/llm_extractor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/pre-processing-service/app/utils/llm_extractor.py b/apps/pre-processing-service/app/utils/llm_extractor.py index 4263a270..3fb200a5 100644 --- a/apps/pre-processing-service/app/utils/llm_extractor.py +++ b/apps/pre-processing-service/app/utils/llm_extractor.py @@ -4,6 +4,7 @@ load_dotenv() + class LLMExtractor: def __init__(self, model="gpt-4o"): From 387dc16eb30abb6c5f3f4b8f88d99b717136a400 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EA=B2=BD=EB=AF=BC?= <153978154+kakusiA@users.noreply.github.com> Date: Wed, 17 Sep 2025 12:30:16 +0900 Subject: [PATCH 42/57] =?UTF-8?q?test:=20=EC=8A=A4=ED=94=84=EB=A7=81=20?= =?UTF-8?q?=EC=88=98=EB=8F=99=EC=8B=A4=ED=96=89=EC=9D=84=EC=9C=84=ED=95=9C?= =?UTF-8?q?=20url=20=EA=B0=9C=EB=B0=9C=20(#110)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: spring Fastapi testing * refactor: test-url response refactor * style: test-url response code formating --------- Co-authored-by: kakusia --- .../app/api/endpoints/sample.py | 45 +++++++++++++++++++ apps/pre-processing-service/app/api/router.py | 4 +- 2 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 apps/pre-processing-service/app/api/endpoints/sample.py diff --git a/apps/pre-processing-service/app/api/endpoints/sample.py b/apps/pre-processing-service/app/api/endpoints/sample.py new file mode 100644 index 00000000..f6d586fb --- /dev/null +++ b/apps/pre-processing-service/app/api/endpoints/sample.py @@ -0,0 +1,45 @@ +from fastapi import APIRouter +from ...model.schemas import * 
+from app.utils.response import Response + +router = APIRouter() + + +@router.get("/") +async def root(): + return {"message": "sample API"} + + +@router.post("/keywords/search", summary="네이버 키워드 검색") +async def search(request: RequestNaverSearch): + return Response.ok({"test": "hello world"}) + + +@router.post("/blogs/rag/create", summary="RAG 기반 블로그 콘텐츠 생성") +async def rag_create(request: RequestBlogCreate): + return Response.ok({"test": "hello world"}) + + +@router.post("/blogs/publish", summary="블로그 콘텐츠 배포") +async def publish(request: RequestBlogPublish): + return Response.ok({"test": "hello world"}) + + +@router.post("/products/search", summary="상품 검색") +async def product_search(request: RequestSadaguSearch): + return Response.ok({"test": "hello world"}) + + +@router.post("/products/match", summary="상품 매칭") +async def product_match(request: RequestSadaguMatch): + return Response.ok({"test": "hello world"}) + + +@router.post("/products/similarity", summary="상품 유사도 분석") +async def product_similarity(request: RequestSadaguSimilarity): + return Response.ok({"test": "hello world"}) + + +@router.post("/products/crawl", summary="상품 상세 정보 크롤링") +async def product_crawl(request: RequestSadaguCrawl): + return Response.ok({"test": "hello world"}) diff --git a/apps/pre-processing-service/app/api/router.py b/apps/pre-processing-service/app/api/router.py index 99286cf6..c1a2fcb4 100644 --- a/apps/pre-processing-service/app/api/router.py +++ b/apps/pre-processing-service/app/api/router.py @@ -1,6 +1,6 @@ # app/api/router.py from fastapi import APIRouter -from .endpoints import keywords, blog, product, test +from .endpoints import keywords, blog, product, test, sample from ..core.config import settings api_router = APIRouter() @@ -17,6 +17,8 @@ # 모듈 테스터를 위한 endpoint -> 추후 삭제 예정 api_router.include_router(test.router, prefix="/tests", tags=["Test"]) +api_router.include_router(sample.router, prefix="/v0", tags=["Sample"]) + @api_router.get("/ping") async def root(): From 
9377488dcb72a15b15715aa2632a4946e2191fd8 Mon Sep 17 00:00:00 2001 From: JiHoon Date: Wed, 17 Sep 2025 12:46:47 +0900 Subject: [PATCH 43/57] =?UTF-8?q?fix=20:=20local=ED=99=98=EA=B2=BD=20docke?= =?UTF-8?q?r-compose.yml=20network=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docker/local/docker-compose.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docker/local/docker-compose.yml b/docker/local/docker-compose.yml index c69ea697..146f0534 100644 --- a/docker/local/docker-compose.yml +++ b/docker/local/docker-compose.yml @@ -47,6 +47,8 @@ services: interval: 10s timeout: 5s retries: 5 + networks: + - icebang-network grafana: image: grafana/grafana:10.1.0 @@ -65,8 +67,14 @@ services: interval: 30s timeout: 10s retries: 5 + networks: + - icebang-network volumes: mariadb_data: loki_data: - grafana_data: \ No newline at end of file + grafana_data: + +networks: + icebang-network: + driver: bridge \ No newline at end of file From b9525379fb04f9d9e14566cc9c65a3618f8b66e6 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 12:54:59 +0900 Subject: [PATCH 44/57] feat: blog rag create endpoint --- apps/pre-processing-service/app/api/endpoints/blog.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 68a23496..79452e4a 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -8,6 +8,7 @@ BloggerBlogPostAdapter, ) # 수정된 import from app.utils.response import Response +from app.service.blog.blog_create_service import BlogContentService router = APIRouter() @@ -21,8 +22,10 @@ async def rag_create(request: RequestBlogCreate): """ RAG 기반 블로그 콘텐츠 생성 """ - return {"message": "blog API"} + blog_service = BlogContentService() + response_data = 
blog_service.generate_blog_content(request) + return Response.ok(response_data) @router.post( "/publish", From bb0eb2d5ddd4ee0935e2f2dd6e452364882ddd39 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 14:00:39 +0900 Subject: [PATCH 45/57] =?UTF-8?q?refactor:=20blog=20service=20=EB=A6=AC?= =?UTF-8?q?=ED=8C=A9=ED=86=A0=EB=A7=81=201.=20endpoint=EC=97=90=20?= =?UTF-8?q?=EC=9E=88=EB=8A=94=20if-else=20=EC=84=9C=EB=B9=84=EC=8A=A4?= =?UTF-8?q?=EB=A1=9C=20=EC=9D=B4=EB=8F=99=202.=20=ED=98=84=EC=9E=AC=20?= =?UTF-8?q?=EC=9E=88=EB=8A=94=203=EA=B0=9C=EC=9D=98=20=EB=B8=94=EB=A1=9C?= =?UTF-8?q?=EA=B7=B8=20=EB=AA=A8=EB=91=90=20=ED=8F=AC=EC=8A=A4=ED=8C=85=20?= =?UTF-8?q?=ED=98=95=EC=8B=9D=EC=9D=B4=20=EB=8F=99=EC=9D=BC=ED=95=B4?= =?UTF-8?q?=EC=84=9C=20factory=20method=20=EC=A0=81=EC=9A=A9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/api/endpoints/blog.py | 47 ++----------------- .../app/service/blog/blog_publish_service.py | 44 +++++++++++++++++ .../app/service/blog/blog_service_factory.py | 38 +++++++++++++++ 3 files changed, 86 insertions(+), 43 deletions(-) create mode 100644 apps/pre-processing-service/app/service/blog/blog_publish_service.py create mode 100644 apps/pre-processing-service/app/service/blog/blog_service_factory.py diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index 79452e4a..c4a206ff 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -9,6 +9,7 @@ ) # 수정된 import from app.utils.response import Response from app.service.blog.blog_create_service import BlogContentService +from app.service.blog.blog_publish_service import BlogPublishService router = APIRouter() @@ -38,47 +39,7 @@ async def publish(request: RequestBlogPublish): 네이버 블로그와 티스토리 블로그를 지원하며, 현재는 생성된 콘텐츠가 아닌 임의의 제목, 내용, 태그를 배포합니다. 
""" - if request.tag == "naver": - naver_service = NaverBlogPostService() - response_data = naver_service.post_content( - title=request.post_title, - content=request.post_content, - tags=request.post_tags, - ) + publish_service = BlogPublishService() + response_data = publish_service.publish_content(request) - if not response_data: - raise CustomException( - "네이버 블로그 포스팅에 실패했습니다.", status_code=500 - ) - - return Response.ok(response_data) - - elif request.tag == "tistory": - tistory_service = TistoryBlogPostService() - response_data = tistory_service.post_content( - title=request.post_title, - content=request.post_content, - tags=request.post_tags, - ) - - if not response_data: - raise CustomException( - "티스토리 블로그 포스팅에 실패했습니다.", status_code=500 - ) - - return Response.ok(response_data) - - elif request.tag == "blogger": - blogger_service = BloggerBlogPostAdapter() # 수정: Adapter 사용 - response_data = blogger_service.post_content( - title=request.post_title, - content=request.post_content, - tags=request.post_tags, - ) - - if not response_data: - raise CustomException( - "블로거 블로그 포스팅에 실패했습니다.", status_code=500 - ) - - return Response.ok(response_data) + return Response.ok(response_data) diff --git a/apps/pre-processing-service/app/service/blog/blog_publish_service.py b/apps/pre-processing-service/app/service/blog/blog_publish_service.py new file mode 100644 index 00000000..ee2831a4 --- /dev/null +++ b/apps/pre-processing-service/app/service/blog/blog_publish_service.py @@ -0,0 +1,44 @@ +from typing import Dict +from app.errors.CustomException import CustomException +from app.model.schemas import RequestBlogPublish +from app.service.blog.blog_service_factory import BlogServiceFactory + + +class BlogPublishService: + """블로그 발행을 담당하는 서비스 클래스""" + + def __init__(self): + self.factory = BlogServiceFactory() + + def publish_content(self, request: RequestBlogPublish) -> Dict: + """ + 생성된 블로그 콘텐츠를 배포합니다. 
+ """ + try: + # 팩토리를 통해 적절한 서비스 생성 + blog_service = self.factory.create_service(request.tag) + + # 공통 인터페이스로 포스팅 실행 + response_data = blog_service.post_content( + title=request.post_title, + content=request.post_content, + tags=request.post_tags, + ) + + if not response_data: + raise CustomException( + f"{request.tag} 블로그 포스팅에 실패했습니다.", + status_code=500 + ) + + return response_data + + except CustomException: + # 이미 처리된 예외는 그대로 전달 + raise + except Exception as e: + # 예상치 못한 예외 처리 + raise CustomException( + f"블로그 포스팅 중 오류가 발생했습니다: {str(e)}", + status_code=500 + ) \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/blog/blog_service_factory.py b/apps/pre-processing-service/app/service/blog/blog_service_factory.py new file mode 100644 index 00000000..087b24a4 --- /dev/null +++ b/apps/pre-processing-service/app/service/blog/blog_service_factory.py @@ -0,0 +1,38 @@ +from typing import Dict, Type +from app.service.blog.base_blog_post_service import BaseBlogPostService +from app.service.blog.naver_blog_post_service import NaverBlogPostService +from app.service.blog.tistory_blog_post_service import TistoryBlogPostService +from app.service.blog.blogger_blog_post_adapter import BloggerBlogPostAdapter +from app.errors.CustomException import CustomException + + +class BlogServiceFactory: + """블로그 서비스 객체 생성을 담당하는 팩토리""" + + # 서비스 타입별 클래스 매핑 + _services: Dict[str, Type[BaseBlogPostService]] = { + "naver": NaverBlogPostService, + "tistory": TistoryBlogPostService, + "blogger": BloggerBlogPostAdapter, + } + + @classmethod + def create_service(cls, platform: str) -> BaseBlogPostService: + """ + 플랫폼에 따른 블로그 서비스 인스턴스 생성 + """ + service_class = cls._services.get(platform.lower()) + + if not service_class: + raise CustomException( + f"지원하지 않는 플랫폼입니다: {platform}. 
" + f"지원 플랫폼: {list(cls._services.keys())}", + status_code=400 + ) + + return service_class() + + @classmethod + def get_supported_platforms(cls) -> list: + """지원하는 플랫폼 목록 반환""" + return list(cls._services.keys()) From 67209027dfee83f83f5c999da108ad142d228dc2 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Wed, 17 Sep 2025 14:05:40 +0900 Subject: [PATCH 46/57] chore: poetry run black . --- apps/pre-processing-service/app/api/endpoints/blog.py | 1 + .../app/service/blog/blog_publish_service.py | 8 +++----- .../app/service/blog/blog_service_factory.py | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/blog.py b/apps/pre-processing-service/app/api/endpoints/blog.py index c4a206ff..d0d078e8 100644 --- a/apps/pre-processing-service/app/api/endpoints/blog.py +++ b/apps/pre-processing-service/app/api/endpoints/blog.py @@ -28,6 +28,7 @@ async def rag_create(request: RequestBlogCreate): return Response.ok(response_data) + @router.post( "/publish", response_model=ResponseBlogPublish, diff --git a/apps/pre-processing-service/app/service/blog/blog_publish_service.py b/apps/pre-processing-service/app/service/blog/blog_publish_service.py index ee2831a4..56ad9f06 100644 --- a/apps/pre-processing-service/app/service/blog/blog_publish_service.py +++ b/apps/pre-processing-service/app/service/blog/blog_publish_service.py @@ -27,8 +27,7 @@ def publish_content(self, request: RequestBlogPublish) -> Dict: if not response_data: raise CustomException( - f"{request.tag} 블로그 포스팅에 실패했습니다.", - status_code=500 + f"{request.tag} 블로그 포스팅에 실패했습니다.", status_code=500 ) return response_data @@ -39,6 +38,5 @@ def publish_content(self, request: RequestBlogPublish) -> Dict: except Exception as e: # 예상치 못한 예외 처리 raise CustomException( - f"블로그 포스팅 중 오류가 발생했습니다: {str(e)}", - status_code=500 - ) \ No newline at end of file + f"블로그 포스팅 중 오류가 발생했습니다: {str(e)}", status_code=500 + ) diff --git 
a/apps/pre-processing-service/app/service/blog/blog_service_factory.py b/apps/pre-processing-service/app/service/blog/blog_service_factory.py index 087b24a4..b6bc6883 100644 --- a/apps/pre-processing-service/app/service/blog/blog_service_factory.py +++ b/apps/pre-processing-service/app/service/blog/blog_service_factory.py @@ -27,7 +27,7 @@ def create_service(cls, platform: str) -> BaseBlogPostService: raise CustomException( f"지원하지 않는 플랫폼입니다: {platform}. " f"지원 플랫폼: {list(cls._services.keys())}", - status_code=400 + status_code=400, ) return service_class() From 6cbcf64bcc960209a13348724b112baff00aa5d6 Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Wed, 17 Sep 2025 14:09:00 +0900 Subject: [PATCH 47/57] =?UTF-8?q?fix:=20=ED=95=84=EC=9A=94=EC=97=86?= =?UTF-8?q?=EB=8A=94=20=EB=A9=94=EC=84=9C=EB=93=9C=20=EC=82=AD=EC=A0=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../icebang/domain/workflow/service/WorkflowService.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java index 052b96fa..f66cb61c 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java @@ -27,9 +27,4 @@ public PageResult getPagedResult(PageParams pageParams) { () -> workflowMapper.selectWorkflowList(pageParams), () -> workflowMapper.selectWorkflowCount(pageParams)); } - - @Transactional(readOnly = true) - public WorkflowCardDto getWorkflowById(BigInteger id) { - return workflowMapper.selectWorkflowById(id); - } } From cd784d173cd9f93754f53b0799831b373f530e51 Mon Sep 17 00:00:00 2001 From: bwnfo3 Date: Wed, 17 Sep 2025 14:12:15 +0900 Subject: [PATCH 48/57] fix: spotlessApply --- 
.../site/icebang/domain/workflow/service/WorkflowService.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java index f66cb61c..71600b4b 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java @@ -1,7 +1,5 @@ package site.icebang.domain.workflow.service; -import java.math.BigInteger; - import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; From b1ff78504d5947e14615d83b204e390818d1aae9 Mon Sep 17 00:00:00 2001 From: Jihu Kim Date: Wed, 17 Sep 2025 14:25:00 +0900 Subject: [PATCH 49/57] =?UTF-8?q?=EC=9B=8C=ED=81=AC=ED=94=8C=EB=A1=9C?= =?UTF-8?q?=EC=9A=B0=20=EC=8B=A4=ED=96=89=20=EB=B0=8F=20Quartz=20=EC=8A=A4?= =?UTF-8?q?=EC=BC=80=EC=A4=84=EB=9F=AC=20=EC=97=B0=EB=8F=99=20(#106)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: workflow 도메인 모델 생성 * chore: Job 도메인 모델 생성 * refactor: schedule 도메인 디렉토리 위치 이동 * chore: Workflow 도메인관련 Mapper 및 Service 설정 * chore: execution 도메인 생성 및 mapper 설정 * chore: schedule 도메인 생성 및 mapper 설정 * chore: Java 표준 라이프사이클 어노테이션 설정 추가 * refactor: workflow 도메인 디렉토리 위치 변경 * refactor: ScheduleMapper 메서드명 변경 * feature: Workflow 수동 실행(REST API) * fix: 불필요한 주석 제거 * chore: 불필요한 의존성 제거 및 Spring Quartz 의존성 추가 * fix: 불필요한 batch 패키지 삭제 * fix: 불필요한 batch 패키지 삭제 * fix(deprecated): FastApiDto 수정 예정 * refactor: Spring 내장 스케줄러 삭제 * refactor: Spring 내장 스케줄러 설정 삭제 * fix: 불필요한 WorkflowJobMapper 삭제 * chore: job 도메인 세팅 * chore: JobRun 도메인 세팅 * chore: Schedule 도메인 세팅 * chore: Task 도메인 세팅 * chore: TaskRun 도메인 세팅 * chore: WorkflowRun 도메인 세팅 * chore: Workflow 도메인 세팅 * chore: Spring Quartz 세팅 * chore: MyBatis JsonNodeTypeHandler 세팅 * refactor: Code 
Formatting * fix: 불필요한 import 삭제 * refactor: execution(TaskRun, JobRun, WorkflowRun) 도메인 설정 * refactor: Quartz 관련 datasource 설정 필요 * refactor: Mapper.xml 수정 * refactor: Code Formatting --- apps/user-service/build.gradle | 4 +- .../site/icebang/UserServiceApplication.java | 4 - .../icebang/batch/common/JobContextKeys.java | 15 --- .../batch/job/BlogAutomationJobConfig.java | 115 ------------------ .../tasklet/CrawlSelectedProductTasklet.java | 60 --------- .../tasklet/ExtractTrendKeywordTasklet.java | 51 -------- .../tasklet/FindSimilarProductsTasklet.java | 60 --------- .../tasklet/GenerateBlogContentTasklet.java | 62 ---------- .../MatchProductWithKeywordTasklet.java | 57 --------- .../batch/tasklet/PublishBlogPostTasklet.java | 68 ----------- .../SearchProductsFromMallTasklet.java | 58 --------- .../domain/execution/mapper/JobRunMapper.java | 12 ++ .../execution/mapper/TaskRunMapper.java | 12 ++ .../execution/mapper/WorkflowRunMapper.java | 12 ++ .../domain/execution/model/JobRun.java | 38 ++++++ .../domain/execution/model/TaskRun.java | 43 +++++++ .../domain/execution/model/WorkflowRun.java | 39 ++++++ .../schedule/mapper/ScheduleMapper.java | 12 ++ .../domain/schedule/model/Schedule.java | 31 +++++ .../service/QuartzScheduleService.java | 43 +++++++ .../controller/WorkflowController.java | 17 ++- .../domain/workflow/dto/WorkflowCardDto.java | 13 +- .../domain/workflow/mapper/JobMapper.java | 15 +++ .../domain/workflow/mapper/TaskMapper.java | 12 ++ .../workflow/mapper/WorkflowMapper.java | 19 +++ .../icebang/domain/workflow/model/Job.java | 22 ++++ .../icebang/domain/workflow/model/Task.java | 20 +++ .../domain/workflow/model/Workflow.java | 28 +++++ .../workflow/runner/HttpTaskRunner.java | 49 ++++++++ .../domain/workflow/runner/TaskRunner.java | 22 ++++ .../scheduler/WorkflowTriggerJob.java | 24 ++++ .../service/WorkflowExecutionService.java | 111 +++++++++++++++++ .../fastapi/adapter/FastApiAdapter.java | 106 ---------------- 
.../external/fastapi/dto/FastApiDto.java | 103 ---------------- .../global/aop/logging/LoggingAspect.java | 14 --- .../config/QuartzSchedulerInitializer.java | 33 +++++ .../typehandler/JsonNodeTypeHandler.java | 56 +++++++++ .../config/scheduler/SchedulerConfig.java | 28 ----- .../schedule/mapper/ScheduleMapper.java | 12 -- .../site/icebang/schedule/model/Schedule.java | 14 --- .../schedule/runner/SchedulerInitializer.java | 31 ----- .../service/DynamicSchedulerService.java | 63 ---------- .../main/resources/application-develop.yml | 14 +++ .../main/resources/application-production.yml | 4 + .../src/main/resources/application.yml | 1 + .../resources/mybatis/mapper/JobMapper.xml | 37 ++++++ .../resources/mybatis/mapper/JobRunMapper.xml | 28 +++++ .../mybatis/mapper/ScheduleMapper.xml | 18 +-- .../resources/mybatis/mapper/TaskMapper.xml | 18 +++ .../mybatis/mapper/TaskRunMapper.xml | 17 +++ .../mybatis/mapper/WorkflowMapper.xml | 42 +++++++ .../mybatis/mapper/WorkflowRunMapper.xml | 28 +++++ 52 files changed, 873 insertions(+), 942 deletions(-) delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java delete mode 100644 apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java delete mode 100644 
apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java create mode 100644 
apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java delete mode 100644 apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java delete mode 100644 apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java create mode 100644 apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java create mode 100644 apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java delete mode 100644 apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java delete mode 100644 apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java delete mode 100644 apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java delete mode 100644 apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java delete mode 100644 apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 29f095ea..096e6d65 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -44,8 +44,8 @@ dependencies { // MyBatis implementation 'org.mybatis.spring.boot:mybatis-spring-boot-starter:3.0.5' - // batch - implementation 'org.springframework.boot:spring-boot-starter-batch' + // Scheduler + 
implementation 'org.springframework.boot:spring-boot-starter-quartz' implementation 'org.springframework.boot:spring-boot-starter-log4j2' implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' diff --git a/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java index 68da9f2a..29e975ba 100644 --- a/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java +++ b/apps/user-service/src/main/java/site/icebang/UserServiceApplication.java @@ -1,13 +1,9 @@ package site.icebang; import org.mybatis.spring.annotation.MapperScan; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.scheduling.annotation.EnableScheduling; -@EnableScheduling -@EnableBatchProcessing @SpringBootApplication @MapperScan("site.icebang.**.mapper") public class UserServiceApplication { diff --git a/apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java b/apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java deleted file mode 100644 index d28b7bd0..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/common/JobContextKeys.java +++ /dev/null @@ -1,15 +0,0 @@ -package site.icebang.batch.common; - -/** - * Spring Batch의 JobExecutionContext에서 Step 간 데이터 공유를 위해 사용되는 Key들을 상수로 정의하는 인터페이스. 모든 Tasklet은 이 - * 인터페이스를 참조하여 데이터의 일관성을 유지합니다. 
- */ -public interface JobContextKeys { - - String EXTRACTED_KEYWORD = "extractedKeyword"; - String SEARCHED_PRODUCTS = "searchedProducts"; - String MATCHED_PRODUCTS = "matchedProducts"; - String SELECTED_PRODUCT = "selectedProduct"; - String CRAWLED_PRODUCT_DETAIL = "crawledProductDetail"; - String GENERATED_CONTENT = "generatedContent"; -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java b/apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java deleted file mode 100644 index d0c934b9..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/job/BlogAutomationJobConfig.java +++ /dev/null @@ -1,115 +0,0 @@ -package site.icebang.batch.job; // 패키지 경로 수정 - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.job.builder.JobBuilder; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.transaction.PlatformTransactionManager; - -import lombok.RequiredArgsConstructor; - -import site.icebang.batch.tasklet.*; - -/** [배치 시스템 구현] 트렌드 기반 블로그 자동화 워크플로우를 구성하는 Job들을 정의합니다. 
*/ -@Configuration -@RequiredArgsConstructor -public class BlogAutomationJobConfig { - - // --- Tasklets --- - private final ExtractTrendKeywordTasklet extractTrendKeywordTask; - private final SearchProductsFromMallTasklet searchProductsFromMallTask; - private final MatchProductWithKeywordTasklet matchProductWithKeywordTask; - private final FindSimilarProductsTasklet findSimilarProductsTask; - private final CrawlSelectedProductTasklet crawlSelectedProductTask; - private final GenerateBlogContentTasklet generateBlogContentTask; - private final PublishBlogPostTasklet publishBlogPostTask; - - /** Job 1: 상품 선정 및 정보 수집 키워드 추출부터 최종 상품 정보 크롤링까지의 과정을 책임집니다. */ - @Bean - public Job productSelectionJob( - JobRepository jobRepository, - Step extractTrendKeywordStep, - Step searchProductsFromMallStep, - Step matchProductWithKeywordStep, - Step findSimilarProductsStep, - Step crawlSelectedProductStep) { - return new JobBuilder("productSelectionJob", jobRepository) - .start(extractTrendKeywordStep) - .next(searchProductsFromMallStep) - .next(matchProductWithKeywordStep) - .next(findSimilarProductsStep) - .next(crawlSelectedProductStep) - .build(); - } - - /** Job 2: 콘텐츠 생성 및 발행 수집된 상품 정보로 블로그 콘텐츠를 생성하고 발행합니다. 
*/ - @Bean - public Job contentPublishingJob( - JobRepository jobRepository, Step generateBlogContentStep, Step publishBlogPostStep) { - return new JobBuilder("contentPublishingJob", jobRepository) - .start(generateBlogContentStep) - .next(publishBlogPostStep) - .build(); - } - - // --- Steps for productSelectionJob --- - @Bean - public Step extractTrendKeywordStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("extractTrendKeywordStep", jobRepository) - .tasklet(extractTrendKeywordTask, transactionManager) - .build(); - } - - @Bean - public Step searchProductsFromMallStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("searchProductsFromMallStep", jobRepository) - .tasklet(searchProductsFromMallTask, transactionManager) - .build(); - } - - @Bean - public Step matchProductWithKeywordStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("matchProductWithKeywordStep", jobRepository) - .tasklet(matchProductWithKeywordTask, transactionManager) - .build(); - } - - @Bean - public Step findSimilarProductsStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("findSimilarProductsStep", jobRepository) - .tasklet(findSimilarProductsTask, transactionManager) - .build(); - } - - @Bean - public Step crawlSelectedProductStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("crawlSelectedProductStep", jobRepository) - .tasklet(crawlSelectedProductTask, transactionManager) - .build(); - } - - // --- Steps for contentPublishingJob --- - @Bean - public Step generateBlogContentStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("generateBlogContentStep", jobRepository) - .tasklet(generateBlogContentTask, transactionManager) - .build(); - } - - 
@Bean - public Step publishBlogPostStep( - JobRepository jobRepository, PlatformTransactionManager transactionManager) { - return new StepBuilder("publishBlogPostStep", jobRepository) - .tasklet(publishBlogPostTask, transactionManager) - .build(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java deleted file mode 100644 index 6a182c37..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/CrawlSelectedProductTasklet.java +++ /dev/null @@ -1,60 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.Map; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguCrawl; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguCrawl; - -@Slf4j -@Component -@RequiredArgsConstructor -public class CrawlSelectedProductTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 5] 최종 상품 크롤링 Tasklet 실행 시작"); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - Map selectedProduct = - (Map) jobExecutionContext.get(JobContextKeys.SELECTED_PRODUCT); - - if (selectedProduct == null || !selectedProduct.containsKey("link")) { - throw new 
RuntimeException("크롤링할 상품 URL이 없습니다."); - } - String productUrl = (String) selectedProduct.get("link"); - - RequestSsadaguCrawl request = new RequestSsadaguCrawl(1, 1, null, "detail", productUrl); - ResponseSsadaguCrawl response = fastApiAdapter.requestProductCrawl(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI 상품 크롤링에 실패했습니다."); - } - - Map productDetail = response.productDetail(); - log.info(">>>> FastAPI로부터 크롤링된 상품 상세 정보 획득"); - - jobExecutionContext.put(JobContextKeys.CRAWLED_PRODUCT_DETAIL, productDetail); - - // log.info(">>>> [Step 5] 최종 상품 크롤링 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java deleted file mode 100644 index a35bebf9..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/ExtractTrendKeywordTasklet.java +++ /dev/null @@ -1,51 +0,0 @@ -package site.icebang.batch.tasklet; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestNaverSearch; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseNaverSearch; - -@Slf4j -@Component 
-@RequiredArgsConstructor -public class ExtractTrendKeywordTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 1] 키워드 추출 Tasklet 실행 시작"); - - RequestNaverSearch request = - new RequestNaverSearch(1, 1, null, "naver", "50000000", null, null); - ResponseNaverSearch response = fastApiAdapter.requestNaverKeywordSearch(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI로부터 키워드를 추출하는 데 실패했습니다."); - } - String extractedKeyword = response.keyword(); - log.info(">>>> FastAPI로부터 추출된 키워드: {}", extractedKeyword); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - // 다른 클래스의 상수를 직접 참조하는 대신 공용 인터페이스의 키를 사용 - jobExecutionContext.put(JobContextKeys.EXTRACTED_KEYWORD, extractedKeyword); - - // log.info(">>>> [Step 1] 키워드 추출 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java deleted file mode 100644 index 316641e1..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/FindSimilarProductsTasklet.java +++ /dev/null @@ -1,60 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; -import java.util.Map; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import 
org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguSimilarity; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguSimilarity; - -@Slf4j -@Component -@RequiredArgsConstructor -public class FindSimilarProductsTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 4] 상품 유사도 분석 Tasklet 실행 시작"); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - String keyword = (String) jobExecutionContext.get(JobContextKeys.EXTRACTED_KEYWORD); - List> matchedProducts = - (List>) jobExecutionContext.get(JobContextKeys.MATCHED_PRODUCTS); - List> searchResults = - (List>) jobExecutionContext.get(JobContextKeys.SEARCHED_PRODUCTS); - - RequestSsadaguSimilarity request = - new RequestSsadaguSimilarity(1, 1, null, keyword, matchedProducts, searchResults); - ResponseSsadaguSimilarity response = fastApiAdapter.requestProductSimilarity(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI 상품 유사도 분석에 실패했습니다."); - } - - Map selectedProduct = response.selectedProduct(); - log.info(">>>> FastAPI로부터 최종 선택된 상품: {}", selectedProduct.get("title")); - - jobExecutionContext.put(JobContextKeys.SELECTED_PRODUCT, selectedProduct); - - // log.info(">>>> [Step 4] 상품 유사도 분석 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git 
a/apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java deleted file mode 100644 index ecf44cbb..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/GenerateBlogContentTasklet.java +++ /dev/null @@ -1,62 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; -import java.util.Map; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestBlogCreate; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseBlogCreate; - -@Slf4j -@Component -@RequiredArgsConstructor -public class GenerateBlogContentTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 6] 블로그 콘텐츠 생성 Tasklet 실행 시작"); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - Map productDetail = - (Map) jobExecutionContext.get(JobContextKeys.CRAWLED_PRODUCT_DETAIL); - - // TODO: productDetail을 기반으로 LLM에 전달할 프롬프트 생성 - RequestBlogCreate request = new RequestBlogCreate(1, 1, null); - ResponseBlogCreate response = fastApiAdapter.requestBlogCreation(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI 블로그 콘텐츠 생성에 실패했습니다."); - } - - // TODO: 실제 생성된 콘텐츠를 
response로부터 받아와야 함 (현재는 더미 데이터) - Map generatedContent = - Map.of( - "title", "엄청난 상품을 소개합니다! " + productDetail.get("title"), - "content", "이 상품은 정말... 좋습니다. 상세 정보: " + productDetail.toString(), - "tags", List.of("상품리뷰", "최고")); - log.info(">>>> FastAPI로부터 블로그 콘텐츠 생성 완료"); - - jobExecutionContext.put(JobContextKeys.GENERATED_CONTENT, generatedContent); - - // log.info(">>>> [Step 6] 블로그 콘텐츠 생성 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java deleted file mode 100644 index bdb15200..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/MatchProductWithKeywordTasklet.java +++ /dev/null @@ -1,57 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; -import java.util.Map; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguMatch; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguMatch; - -@Slf4j -@Component -@RequiredArgsConstructor -public class MatchProductWithKeywordTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus 
execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 3] 상품 매칭 Tasklet 실행 시작"); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - String keyword = (String) jobExecutionContext.get(JobContextKeys.EXTRACTED_KEYWORD); - List> searchResults = - (List>) jobExecutionContext.get(JobContextKeys.SEARCHED_PRODUCTS); - - RequestSsadaguMatch request = new RequestSsadaguMatch(1, 1, null, keyword, searchResults); - ResponseSsadaguMatch response = fastApiAdapter.requestProductMatch(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI 상품 매칭에 실패했습니다."); - } - - List> matchedProducts = response.matchedProducts(); - log.info(">>>> FastAPI로부터 매칭된 상품 {}개", matchedProducts.size()); - - jobExecutionContext.put(JobContextKeys.MATCHED_PRODUCTS, matchedProducts); - - log.info(">>>> [Step 3] 상품 매칭 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java deleted file mode 100644 index e1b75a18..00000000 --- a/apps/user-service/src/main/java/site/icebang/batch/tasklet/PublishBlogPostTasklet.java +++ /dev/null @@ -1,68 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; -import java.util.Map; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import 
lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestBlogPublish; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseBlogPublish; - -@Slf4j -@Component -@RequiredArgsConstructor -public class PublishBlogPostTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 7] 블로그 발행 Tasklet 실행 시작"); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - Map content = - (Map) jobExecutionContext.get(JobContextKeys.GENERATED_CONTENT); - - // TODO: UserConfig 등에서 실제 블로그 정보(ID, PW)를 가져와야 함 - String blogId = "my_blog_id"; - String blogPw = "my_blog_password"; - - RequestBlogPublish request = - new RequestBlogPublish( - 1, - 1, - null, - "naver", - blogId, - blogPw, - (String) content.get("title"), - (String) content.get("content"), - (List) content.get("tags")); - - ResponseBlogPublish response = fastApiAdapter.requestBlogPost(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI 블로그 발행에 실패했습니다."); - } - - log.info(">>>> FastAPI를 통해 블로그 발행 성공: {}", response.metadata()); - - // log.info(">>>> [Step 7] 블로그 발행 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java b/apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java deleted file mode 100644 index 3480f391..00000000 --- 
a/apps/user-service/src/main/java/site/icebang/batch/tasklet/SearchProductsFromMallTasklet.java +++ /dev/null @@ -1,58 +0,0 @@ -package site.icebang.batch.tasklet; - -import java.util.List; -import java.util.Map; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.batch.common.JobContextKeys; -import site.icebang.external.fastapi.adapter.FastApiAdapter; -import site.icebang.external.fastapi.dto.FastApiDto.RequestSsadaguSearch; -import site.icebang.external.fastapi.dto.FastApiDto.ResponseSsadaguSearch; - -@Slf4j -@Component -@RequiredArgsConstructor -public class SearchProductsFromMallTasklet implements Tasklet { - - private final FastApiAdapter fastApiAdapter; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - // log.info(">>>> [Step 2] 상품 검색 Tasklet 실행 시작"); - - ExecutionContext jobExecutionContext = getJobExecutionContext(chunkContext); - String keyword = (String) jobExecutionContext.get(JobContextKeys.EXTRACTED_KEYWORD); - - if (keyword == null) { - throw new RuntimeException("이전 Step에서 키워드를 전달받지 못했습니다."); - } - - RequestSsadaguSearch request = new RequestSsadaguSearch(1, 1, null, keyword); - ResponseSsadaguSearch response = fastApiAdapter.requestSsadaguProductSearch(request); - - if (response == null || !"200".equals(response.status())) { - throw new RuntimeException("FastAPI 상품 검색에 실패했습니다."); - } - List> searchResults = response.searchResults(); - log.info(">>>> FastAPI로부터 검색된 상품 {}개", searchResults.size()); - - jobExecutionContext.put(JobContextKeys.SEARCHED_PRODUCTS, searchResults); - - // 
log.info(">>>> [Step 2] 상품 검색 Tasklet 실행 완료"); - return RepeatStatus.FINISHED; - } - - private ExecutionContext getJobExecutionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext(); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java new file mode 100644 index 00000000..d5ce7e8f --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/JobRunMapper.java @@ -0,0 +1,12 @@ +package site.icebang.domain.execution.mapper; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.domain.execution.model.JobRun; + +@Mapper +public interface JobRunMapper { + void insert(JobRun jobRun); + + void update(JobRun jobRun); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java new file mode 100644 index 00000000..646a7c91 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/TaskRunMapper.java @@ -0,0 +1,12 @@ +package site.icebang.domain.execution.mapper; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.domain.execution.model.TaskRun; + +@Mapper +public interface TaskRunMapper { + void insert(TaskRun taskRun); + + void update(TaskRun taskRun); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java new file mode 100644 index 00000000..776ec4b0 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/execution/mapper/WorkflowRunMapper.java @@ -0,0 +1,12 @@ +package site.icebang.domain.execution.mapper; + +import org.apache.ibatis.annotations.Mapper; + +import 
site.icebang.domain.execution.model.WorkflowRun; + +@Mapper +public interface WorkflowRunMapper { + void insert(WorkflowRun workflowRun); + + void update(WorkflowRun workflowRun); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java b/apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java new file mode 100644 index 00000000..f5310f12 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/execution/model/JobRun.java @@ -0,0 +1,38 @@ +package site.icebang.domain.execution.model; + +import java.time.LocalDateTime; + +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@NoArgsConstructor +public class JobRun { + + private Long id; + private Long workflowRunId; + private Long jobId; + private String status; // PENDING, RUNNING, SUCCESS, FAILED + private LocalDateTime startedAt; + private LocalDateTime finishedAt; + private LocalDateTime createdAt; + + private JobRun(Long workflowRunId, Long jobId) { + this.workflowRunId = workflowRunId; + this.jobId = jobId; + this.status = "RUNNING"; + this.startedAt = LocalDateTime.now(); + this.createdAt = this.startedAt; + } + + /** Job 실행 시작을 위한 정적 팩토리 메소드 */ + public static JobRun start(Long workflowRunId, Long jobId) { + return new JobRun(workflowRunId, jobId); + } + + /** Job 실행 완료 처리 */ + public void finish(String status) { + this.status = status; + this.finishedAt = LocalDateTime.now(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java b/apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java new file mode 100644 index 00000000..f1ae2239 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/execution/model/TaskRun.java @@ -0,0 +1,43 @@ +package site.icebang.domain.execution.model; + +import java.time.LocalDateTime; + +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@NoArgsConstructor +public class TaskRun { + + 
private Long id; + private Long jobRunId; + private Long taskId; + private String status; // PENDING, RUNNING, SUCCESS, FAILED + private String resultMessage; // 실행 결과 메시지 + private LocalDateTime startedAt; + private LocalDateTime finishedAt; + private LocalDateTime createdAt; + + // 생성자나 정적 팩토리 메서드를 통해 객체 생성 로직을 관리 + private TaskRun(Long jobRunId, Long taskId) { + this.jobRunId = jobRunId; + this.taskId = taskId; + this.status = "PENDING"; + this.createdAt = LocalDateTime.now(); + } + + /** Task 실행 시작을 위한 정적 팩토리 메서드 */ + public static TaskRun start(Long jobRunId, Long taskId) { + TaskRun taskRun = new TaskRun(jobRunId, taskId); + taskRun.status = "RUNNING"; + taskRun.startedAt = LocalDateTime.now(); + return taskRun; + } + + /** Task 실행 완료 처리 */ + public void finish(String status, String resultMessage) { + this.status = status; + this.resultMessage = resultMessage; + this.finishedAt = LocalDateTime.now(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java b/apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java new file mode 100644 index 00000000..6bd5dbc9 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/execution/model/WorkflowRun.java @@ -0,0 +1,39 @@ +package site.icebang.domain.execution.model; + +import java.time.LocalDateTime; +import java.util.UUID; + +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@NoArgsConstructor +public class WorkflowRun { + + private Long id; + private Long workflowId; + private String traceId; // 분산 추적을 위한 ID + private String status; // PENDING, RUNNING, SUCCESS, FAILED + private LocalDateTime startedAt; + private LocalDateTime finishedAt; + private LocalDateTime createdAt; + + private WorkflowRun(Long workflowId) { + this.workflowId = workflowId; + this.traceId = UUID.randomUUID().toString(); // 고유 추적 ID 생성 + this.status = "RUNNING"; + this.startedAt = LocalDateTime.now(); + this.createdAt = this.startedAt; 
+ } + + /** 워크플로우 실행 시작을 위한 정적 팩토리 메소드 */ + public static WorkflowRun start(Long workflowId) { + return new WorkflowRun(workflowId); + } + + /** 워크플로우 실행 완료 처리 */ + public void finish(String status) { + this.status = status; + this.finishedAt = LocalDateTime.now(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java new file mode 100644 index 00000000..12567a60 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/mapper/ScheduleMapper.java @@ -0,0 +1,12 @@ +package site.icebang.domain.schedule.mapper; + +import java.util.List; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.domain.schedule.model.Schedule; + +@Mapper +public interface ScheduleMapper { + List findAllActive(); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java new file mode 100644 index 00000000..c2218bd0 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java @@ -0,0 +1,31 @@ +package site.icebang.domain.schedule.model; + +import java.time.LocalDateTime; + +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter // 서비스 레이어에서의 상태 변경 및 MyBatis 매핑을 위해 사용 +@Builder +@NoArgsConstructor(access = AccessLevel.PROTECTED) +@AllArgsConstructor +public class Schedule { + + private Long id; + private Long workflowId; + private String cronExpression; + private String parameters; // JSON format + private boolean isActive; + private String lastRunStatus; + private LocalDateTime lastRunAt; + private LocalDateTime createdAt; + private Long createdBy; + private LocalDateTime updatedAt; + private Long updatedBy; + private String 
scheduleText; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java new file mode 100644 index 00000000..3a5f1aef --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/service/QuartzScheduleService.java @@ -0,0 +1,43 @@ +package site.icebang.domain.schedule.service; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.quartz.*; +import org.springframework.stereotype.Service; +import site.icebang.domain.schedule.model.Schedule; +import site.icebang.domain.workflow.scheduler.WorkflowTriggerJob; + +@Slf4j +@Service +@RequiredArgsConstructor +public class QuartzScheduleService { + + private final Scheduler scheduler; + + public void addOrUpdateSchedule(Schedule schedule) { + JobKey jobKey = JobKey.jobKey("workflow-" + schedule.getWorkflowId()); + JobDetail jobDetail = JobBuilder.newJob(WorkflowTriggerJob.class) + .withIdentity(jobKey) + .withDescription("Workflow " + schedule.getWorkflowId() + " Trigger Job") + .usingJobData("workflowId", schedule.getWorkflowId()) + .storeDurably() + .build(); + + TriggerKey triggerKey = TriggerKey.triggerKey("trigger-for-workflow-" + schedule.getWorkflowId()); + Trigger trigger = TriggerBuilder.newTrigger() + .forJob(jobDetail) + .withIdentity(triggerKey) + .withSchedule(CronScheduleBuilder.cronSchedule(schedule.getCronExpression())) + .build(); + try { + scheduler.scheduleJob(jobDetail, trigger); + log.info("Quartz 스케줄 등록/업데이트 완료: Workflow ID {}", schedule.getWorkflowId()); + } catch (SchedulerException e) { + log.error("Quartz 스케줄 등록 실패", e); + } + } + + public void deleteSchedule(Long workflowId) { + // ... 
(삭제 로직) + } +} \ No newline at end of file diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java index 39077eca..348058ee 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java @@ -1,9 +1,9 @@ package site.icebang.domain.workflow.controller; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.ModelAttribute; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; +import java.util.concurrent.CompletableFuture; + +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; import lombok.RequiredArgsConstructor; @@ -11,6 +11,7 @@ import site.icebang.common.dto.PageParams; import site.icebang.common.dto.PageResult; import site.icebang.domain.workflow.dto.WorkflowCardDto; +import site.icebang.domain.workflow.service.WorkflowExecutionService; import site.icebang.domain.workflow.service.WorkflowService; @RestController @@ -18,6 +19,7 @@ @RequiredArgsConstructor public class WorkflowController { private final WorkflowService workflowService; + private final WorkflowExecutionService workflowExecutionService; @GetMapping("") public ApiResponse> getWorkflowList( @@ -25,4 +27,11 @@ public ApiResponse> getWorkflowList( PageResult result = workflowService.getPagedResult(pageParams); return ApiResponse.success(result); } + + @PostMapping("/{workflowId}/run") + public ResponseEntity runWorkflow(@PathVariable Long workflowId) { + // HTTP 요청/응답 스레드를 블로킹하지 않도록 비동기 실행 + CompletableFuture.runAsync(() -> workflowExecutionService.executeWorkflow(workflowId)); + return ResponseEntity.accepted().build(); + } } diff --git 
a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java index b54a29c0..95a6b704 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java @@ -1,6 +1,13 @@ package site.icebang.domain.workflow.dto; -import lombok.Data; +import lombok.Getter; +import lombok.NoArgsConstructor; -@Data -public class WorkflowCardDto {} +@Getter +@NoArgsConstructor +public class WorkflowCardDto { + private Long id; + private String name; + private String description; + private boolean isEnabled; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java new file mode 100644 index 00000000..a82739f4 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/JobMapper.java @@ -0,0 +1,15 @@ +package site.icebang.domain.workflow.mapper; + +import java.util.List; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.domain.workflow.model.Job; +import site.icebang.domain.workflow.model.Task; + +@Mapper +public interface JobMapper { + List findJobsByWorkflowId(Long workflowId); + + List findTasksByJobId(Long jobId); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java new file mode 100644 index 00000000..0edb7812 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/TaskMapper.java @@ -0,0 +1,12 @@ +package site.icebang.domain.workflow.mapper; + +import java.util.Optional; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.domain.workflow.model.Task; + +@Mapper +public interface TaskMapper { + 
Optional findById(Long id); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java new file mode 100644 index 00000000..4ddd94d3 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java @@ -0,0 +1,19 @@ +package site.icebang.domain.workflow.mapper; + +import java.util.List; +import java.util.Optional; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.common.dto.PageParams; +import site.icebang.domain.workflow.dto.WorkflowCardDto; +import site.icebang.domain.workflow.model.Workflow; + +@Mapper +public interface WorkflowMapper { + Optional findById(Long id); + + List selectWorkflowList(PageParams pageParams); + + int selectWorkflowCount(PageParams pageParams); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java new file mode 100644 index 00000000..0a3604b5 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java @@ -0,0 +1,22 @@ +package site.icebang.domain.workflow.model; + +import java.time.LocalDateTime; + +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@NoArgsConstructor(access = AccessLevel.PROTECTED) +@AllArgsConstructor +public class Job { + private Long id; + private String name; + private String description; + private boolean isEnabled; + private LocalDateTime createdAt; + private Long createdBy; + private LocalDateTime updatedAt; + private Long updatedBy; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java new file mode 100644 index 00000000..09589cc1 --- /dev/null +++ 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java @@ -0,0 +1,20 @@ +package site.icebang.domain.workflow.model; + +import com.fasterxml.jackson.databind.JsonNode; + +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@NoArgsConstructor // MyBatis가 객체를 생성하기 위해 필요 +public class Task { + + private Long id; + private String name; + + /** Task의 타입 (예: "HTTP", "SPRING_BATCH") 이 타입에 따라 TaskRunner가 선택됩니다. */ + private String type; + + /** Task 실행에 필요한 파라미터 (JSON) 예: {"url": "http://...", "method": "POST", "body": {...}} */ + private JsonNode parameters; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java new file mode 100644 index 00000000..3ea80388 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java @@ -0,0 +1,28 @@ +package site.icebang.domain.workflow.model; + +import java.time.LocalDateTime; + +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@Builder +@NoArgsConstructor(access = AccessLevel.PROTECTED) +@AllArgsConstructor +public class Workflow { + + private Long id; + private String name; + private String description; + private boolean isEnabled; + private LocalDateTime createdAt; + private Long createdBy; + private LocalDateTime updatedAt; + private Long updatedBy; + + /** 워크플로우별 기본 설정값 (JSON) */ + private String defaultConfig; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java new file mode 100644 index 00000000..9f497b97 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/HttpTaskRunner.java @@ -0,0 +1,49 @@ +package site.icebang.domain.workflow.runner; + +import 
org.springframework.http.*; +import org.springframework.stereotype.Component; +import org.springframework.web.client.RestClientException; +import org.springframework.web.client.RestTemplate; + +import com.fasterxml.jackson.databind.JsonNode; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.domain.execution.model.TaskRun; +import site.icebang.domain.workflow.model.Task; + +@Slf4j +@Component("httpTaskRunner") +@RequiredArgsConstructor +public class HttpTaskRunner implements TaskRunner { + private final RestTemplate restTemplate; + + @Override + public TaskExecutionResult execute(Task task, TaskRun taskRun) { + JsonNode params = task.getParameters(); + String url = params.get("url").asText(); + String method = params.get("method").asText(); + JsonNode body = params.get("body"); + + try { + HttpEntity requestEntity = + new HttpEntity<>( + body.toString(), + new HttpHeaders() { + { + setContentType(MediaType.APPLICATION_JSON); + } + }); + + ResponseEntity response = + restTemplate.exchange( + url, HttpMethod.valueOf(method.toUpperCase()), requestEntity, String.class); + + return TaskExecutionResult.success(response.getBody()); + } catch (RestClientException e) { + log.error("HTTP Task 실행 실패: TaskRunId={}, Error={}", taskRun.getId(), e.getMessage()); + return TaskExecutionResult.failure(e.getMessage()); + } + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java new file mode 100644 index 00000000..a2b820bb --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/TaskRunner.java @@ -0,0 +1,22 @@ +package site.icebang.domain.workflow.runner; + +import site.icebang.domain.execution.model.TaskRun; +import site.icebang.domain.workflow.model.Task; + +public interface TaskRunner { + record TaskExecutionResult(String status, String message) { + public static 
TaskExecutionResult success(String message) { + return new TaskExecutionResult("SUCCESS", message); + } + + public static TaskExecutionResult failure(String message) { + return new TaskExecutionResult("FAILED", message); + } + + public boolean isFailure() { + return "FAILED".equals(status); + } + } + + TaskExecutionResult execute(Task task, TaskRun taskRun); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java new file mode 100644 index 00000000..196c1fa0 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/scheduler/WorkflowTriggerJob.java @@ -0,0 +1,24 @@ +package site.icebang.domain.workflow.scheduler; + +import org.quartz.JobExecutionContext; +import org.springframework.scheduling.quartz.QuartzJobBean; +import org.springframework.stereotype.Component; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.domain.workflow.service.WorkflowExecutionService; + +@Slf4j +@Component +@RequiredArgsConstructor +public class WorkflowTriggerJob extends QuartzJobBean { + private final WorkflowExecutionService workflowExecutionService; + + @Override + protected void executeInternal(JobExecutionContext context) { + Long workflowId = context.getJobDetail().getJobDataMap().getLong("workflowId"); + log.info("Quartz가 WorkflowTriggerJob을 실행합니다. 
WorkflowId={}", workflowId); + workflowExecutionService.executeWorkflow(workflowId); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java new file mode 100644 index 00000000..086b00de --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java @@ -0,0 +1,111 @@ +package site.icebang.domain.workflow.service; + +import java.util.List; +import java.util.Map; + +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import site.icebang.domain.execution.mapper.JobRunMapper; +import site.icebang.domain.execution.mapper.TaskRunMapper; +import site.icebang.domain.execution.mapper.WorkflowRunMapper; +import site.icebang.domain.execution.model.JobRun; +import site.icebang.domain.execution.model.TaskRun; +import site.icebang.domain.execution.model.WorkflowRun; +import site.icebang.domain.workflow.mapper.JobMapper; +import site.icebang.domain.workflow.model.Job; +import site.icebang.domain.workflow.model.Task; +import site.icebang.domain.workflow.runner.TaskRunner; + +@Slf4j +@Service +@RequiredArgsConstructor +public class WorkflowExecutionService { + + private final JobMapper jobMapper; + private final WorkflowRunMapper workflowRunMapper; + private final JobRunMapper jobRunMapper; + private final TaskRunMapper taskRunMapper; + private final Map taskRunners; + + /** + * 워크플로우 실행의 시작점. 전체 과정은 하나의 트랜잭션으로 묶입니다. 
+ * + * @param workflowId 실행할 워크플로우의 ID + */ + @Transactional + public void executeWorkflow(Long workflowId) { + log.info("========== 워크플로우 실행 시작: WorkflowId={} ==========", workflowId); + WorkflowRun workflowRun = WorkflowRun.start(workflowId); + workflowRunMapper.insert(workflowRun); + + List jobs = jobMapper.findJobsByWorkflowId(workflowId); + log.info("총 {}개의 Job을 순차적으로 실행합니다.", jobs.size()); + + for (Job job : jobs) { + JobRun jobRun = JobRun.start(workflowRun.getId(), job.getId()); + jobRunMapper.insert(jobRun); + log.info( + "---------- Job 실행 시작: JobId={}, JobRunId={} ----------", job.getId(), jobRun.getId()); + + boolean jobSucceeded = executeTasksForJob(jobRun); + + jobRun.finish(jobSucceeded ? "SUCCESS" : "FAILED"); + jobRunMapper.update(jobRun); + + if (!jobSucceeded) { + workflowRun.finish("FAILED"); + workflowRunMapper.update(workflowRun); + log.error("Job 실패로 인해 워크플로우 실행을 중단합니다: WorkflowRunId={}", workflowRun.getId()); + return; // Job이 실패하면 전체 워크플로우를 중단 + } + log.info("---------- Job 실행 성공: JobRunId={} ----------", jobRun.getId()); + } + + workflowRun.finish("SUCCESS"); + workflowRunMapper.update(workflowRun); + log.info("========== 워크플로우 실행 성공: WorkflowRunId={} ==========", workflowRun.getId()); + } + + /** + * 특정 Job에 속한 Task들을 순차적으로 실행합니다. 
+ * + * @param jobRun 실행중인 Job의 기록 객체 + * @return 모든 Task가 성공하면 true, 하나라도 실패하면 false + */ + private boolean executeTasksForJob(JobRun jobRun) { + List tasks = jobMapper.findTasksByJobId(jobRun.getJobId()); + log.info("Job (JobRunId={}) 내 총 {}개의 Task를 실행합니다.", jobRun.getId(), tasks.size()); + + for (Task task : tasks) { + TaskRun taskRun = TaskRun.start(jobRun.getId(), task.getId()); + taskRunMapper.insert(taskRun); + log.info("Task 실행 시작: TaskId={}, TaskRunId={}", task.getId(), taskRun.getId()); + + String runnerBeanName = task.getType().toLowerCase() + "TaskRunner"; + TaskRunner runner = taskRunners.get(runnerBeanName); + + if (runner == null) { + taskRun.finish("FAILED", "지원하지 않는 Task 타입: " + task.getType()); + taskRunMapper.update(taskRun); + log.error("Task 실행 실패 (미지원 타입): TaskRunId={}, Type={}", taskRun.getId(), task.getType()); + return false; // 실행할 Runner가 없으므로 실패 + } + + TaskRunner.TaskExecutionResult result = runner.execute(task, taskRun); + taskRun.finish(result.status(), result.message()); + taskRunMapper.update(taskRun); + + if (result.isFailure()) { + log.error("Task 실행 실패: TaskRunId={}, Message={}", taskRun.getId(), result.message()); + return false; // Task가 실패하면 즉시 중단하고 실패 반환 + } + log.info("Task 실행 성공: TaskRunId={}", taskRun.getId()); + } + + return true; // 모든 Task가 성공적으로 완료됨 + } +} diff --git a/apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java b/apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java deleted file mode 100644 index e4e81a73..00000000 --- a/apps/user-service/src/main/java/site/icebang/external/fastapi/adapter/FastApiAdapter.java +++ /dev/null @@ -1,106 +0,0 @@ -package site.icebang.external.fastapi.adapter; - -import org.springframework.stereotype.Component; -import org.springframework.web.client.RestClientException; -import org.springframework.web.client.RestTemplate; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import 
site.icebang.external.fastapi.dto.FastApiDto.*; -import site.icebang.global.config.properties.FastApiProperties; - -/** FastAPI 서버와의 통신을 전담하는 어댑터 클래스. 모든 외부 API 호출은 이 클래스를 통해 이루어집니다. */ -@Slf4j -@Component -@RequiredArgsConstructor -public class FastApiAdapter { - - private final RestTemplate restTemplate; - private final FastApiProperties properties; - - /** TASK 1: 네이버 키워드 추출을 FastAPI에 요청합니다. */ - public ResponseNaverSearch requestNaverKeywordSearch(RequestNaverSearch request) { - String url = properties.getUrl() + "/keyword/search"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseNaverSearch.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI keyword search API. Error: {}", e.getMessage()); - // TODO: 비즈니스 요구사항에 맞는 예외 처리 (재시도, 기본값 반환, 특정 예외 던지기 등) - return null; - } - } - - /** TASK 2: 싸다구몰 상품 검색을 FastAPI에 요청합니다. */ - public ResponseSsadaguSearch requestSsadaguProductSearch(RequestSsadaguSearch request) { - String url = properties.getUrl() + "/product/search"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseSsadaguSearch.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI product search API. Error: {}", e.getMessage()); - return null; - } - } - - /** TASK 3: 상품 매칭을 FastAPI에 요청합니다. */ - public ResponseSsadaguMatch requestProductMatch(RequestSsadaguMatch request) { - String url = properties.getUrl() + "/product/match"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseSsadaguMatch.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI product match API. Error: {}", e.getMessage()); - return null; - } - } - - /** TASK 4: 상품 유사도 분석을 FastAPI에 요청합니다. 
(메서드명 수정) */ - public ResponseSsadaguSimilarity requestProductSimilarity(RequestSsadaguSimilarity request) { - String url = properties.getUrl() + "/product/similarity"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseSsadaguSimilarity.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI product similarity API. Error: {}", e.getMessage()); - return null; - } - } - - /** TASK 5: 상품 상세 정보 크롤링을 FastAPI에 요청합니다. */ - public ResponseSsadaguCrawl requestProductCrawl(RequestSsadaguCrawl request) { - String url = properties.getUrl() + "/product/crawl"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseSsadaguCrawl.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI product crawl API. Error: {}", e.getMessage()); - return null; - } - } - - /** TASK 6: 블로그 콘텐츠 생성을 FastAPI에 요청합니다. */ - public ResponseBlogCreate requestBlogCreation(RequestBlogCreate request) { - String url = properties.getUrl() + "/blog/rag/create"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseBlogCreate.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI blog creation API. Error: {}", e.getMessage()); - return null; - } - } - - /** TASK 7: 블로그 발행을 FastAPI에 요청합니다. */ - public ResponseBlogPublish requestBlogPost(RequestBlogPublish request) { - String url = properties.getUrl() + "/blog/publish"; - log.info("Requesting to FastAPI [POST {}]", url); - try { - return restTemplate.postForObject(url, request, ResponseBlogPublish.class); - } catch (RestClientException e) { - log.error("Failed to call FastAPI blog publish API. 
Error: {}", e.getMessage()); - return null; - } - } -} diff --git a/apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java b/apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java deleted file mode 100644 index 88ffe284..00000000 --- a/apps/user-service/src/main/java/site/icebang/external/fastapi/dto/FastApiDto.java +++ /dev/null @@ -1,103 +0,0 @@ -package site.icebang.external.fastapi.dto; - -import java.util.List; -import java.util.Map; - -import com.fasterxml.jackson.annotation.JsonProperty; - -/** FastAPI 서버와 통신하기 위한 DTO 클래스 모음. Java의 record를 사용하여 불변 데이터 객체를 간결하게 정의합니다. */ -public final class FastApiDto { - - // --- 1. 네이버 키워드 추출 --- - public record RequestNaverSearch( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId, - String tag, - String category, - @JsonProperty("start_date") String startDate, - @JsonProperty("end_date") String endDate) {} - - public record ResponseNaverSearch( - String status, - String category, - String keyword, - @JsonProperty("total_keyword") Map totalKeyword) {} - - // --- 2. 상품 검색 --- - public record RequestSsadaguSearch( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId, - String keyword) {} - - public record ResponseSsadaguSearch( - String status, - String keyword, - @JsonProperty("search_results") List> searchResults) {} - - // --- 3. 상품 매칭 --- - public record RequestSsadaguMatch( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId, - String keyword, - @JsonProperty("search_results") List> searchResults) {} - - public record ResponseSsadaguMatch( - String status, - String keyword, - @JsonProperty("matched_products") List> matchedProducts) {} - - // --- 4. 
상품 유사도 --- - public record RequestSsadaguSimilarity( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId, - String keyword, - @JsonProperty("matched_products") List> matchedProducts, - @JsonProperty("search_results") List> searchResults) {} - - public record ResponseSsadaguSimilarity( - String status, - String keyword, - @JsonProperty("selected_product") Map selectedProduct, - String reason) {} - - // --- 5. 상품 크롤링 --- - public record RequestSsadaguCrawl( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId, - String tag, - @JsonProperty("product_url") String productUrl) {} - - public record ResponseSsadaguCrawl( - String status, - String tag, - @JsonProperty("product_url") String productUrl, - @JsonProperty("product_detail") Map productDetail, - @JsonProperty("crawled_at") String crawledAt) {} - - // --- 6. 블로그 콘텐츠 생성 --- - public record RequestBlogCreate( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId) {} - - public record ResponseBlogCreate(String status) {} - - // --- 7. 
블로그 발행 --- - public record RequestBlogPublish( - @JsonProperty("job_id") int jobId, - @JsonProperty("schedule_id") int scheduleId, - @JsonProperty("schedule_his_id") Integer scheduleHisId, - String tag, - @JsonProperty("blog_id") String blogId, - @JsonProperty("blog_pw") String blogPw, - @JsonProperty("post_title") String postTitle, - @JsonProperty("post_content") String postContent, - @JsonProperty("post_tags") List postTags) {} - - public record ResponseBlogPublish(String status, Map metadata) {} -} diff --git a/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java index b1806cff..126c7d35 100644 --- a/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java +++ b/apps/user-service/src/main/java/site/icebang/global/aop/logging/LoggingAspect.java @@ -22,9 +22,6 @@ public void serviceMethods() {} @Pointcut("execution(public * site.icebang..service..mapper..*(..))") public void repositoryMethods() {} - @Pointcut("execution(public * site.icebang.batch.tasklet..*(..))") - public void taskletMethods() {} - @Around("controllerMethods()") public Object logController(ProceedingJoinPoint joinPoint) throws Throwable { long start = System.currentTimeMillis(); @@ -54,15 +51,4 @@ public Object logRepository(ProceedingJoinPoint joinPoint) throws Throwable { log.debug("[REPOSITORY] End: {} ({}ms)", joinPoint.getSignature(), duration); return result; } - - @Around("taskletMethods()") - public Object logTasklet(ProceedingJoinPoint joinPoint) throws Throwable { - long start = System.currentTimeMillis(); - // Tasklet 이름만으로도 구분이 되므로, 클래스명 + 메서드명으로 로그를 남깁니다. 
- log.info(">>>> [TASKLET] Start: {}", joinPoint.getSignature().toShortString()); - Object result = joinPoint.proceed(); // 실제 Tasklet의 execute() 메서드 실행 - long duration = System.currentTimeMillis() - start; - log.info("<<<< [TASKLET] End: {} ({}ms)", joinPoint.getSignature().toShortString(), duration); - return result; - } } diff --git a/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java new file mode 100644 index 00000000..233f5834 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java @@ -0,0 +1,33 @@ +package site.icebang.global.config; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.boot.CommandLineRunner; +import org.springframework.stereotype.Component; +import site.icebang.domain.schedule.model.Schedule; +import site.icebang.domain.schedule.mapper.ScheduleMapper; +import site.icebang.domain.schedule.service.QuartzScheduleService; +import java.util.List; + +@Slf4j +@Component +@RequiredArgsConstructor +public class QuartzSchedulerInitializer implements CommandLineRunner { + + private final ScheduleMapper scheduleMapper; + private final QuartzScheduleService quartzScheduleService; + + @Override + public void run(String... 
args) { + log.info("Quartz 스케줄러 초기화 시작: DB 스케줄을 등록합니다."); + try { + List activeSchedules = scheduleMapper.findAllActive(); + for (Schedule schedule : activeSchedules) { + quartzScheduleService.addOrUpdateSchedule(schedule); + } + log.info("총 {}개의 활성 스케줄을 Quartz에 성공적으로 등록했습니다.", activeSchedules.size()); + } catch (Exception e) { + log.error("Quartz 스케줄 초기화 중 오류가 발생했습니다.", e); + } + } +} \ No newline at end of file diff --git a/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java new file mode 100644 index 00000000..4079c9f3 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/JsonNodeTypeHandler.java @@ -0,0 +1,56 @@ +package site.icebang.global.config.mybatis.typehandler; + +import java.sql.CallableStatement; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.apache.ibatis.type.BaseTypeHandler; +import org.apache.ibatis.type.JdbcType; +import org.apache.ibatis.type.MappedTypes; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +@MappedTypes(JsonNode.class) +public class JsonNodeTypeHandler extends BaseTypeHandler { + + private static final ObjectMapper objectMapper = new ObjectMapper(); + + @Override + public void setNonNullParameter( + PreparedStatement ps, int i, JsonNode parameter, JdbcType jdbcType) throws SQLException { + try { + ps.setString(i, objectMapper.writeValueAsString(parameter)); + } catch (JsonProcessingException e) { + throw new SQLException("Error converting JsonNode to String", e); + } + } + + @Override + public JsonNode getNullableResult(ResultSet rs, String columnName) throws SQLException { + return parseJson(rs.getString(columnName)); + } + + @Override + public JsonNode 
getNullableResult(ResultSet rs, int columnIndex) throws SQLException { + return parseJson(rs.getString(columnIndex)); + } + + @Override + public JsonNode getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { + return parseJson(cs.getString(columnIndex)); + } + + private JsonNode parseJson(String json) throws SQLException { + if (json == null) { + return null; + } + try { + return objectMapper.readTree(json); + } catch (JsonProcessingException e) { + throw new SQLException("Error parsing JSON", e); + } + } +} diff --git a/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java deleted file mode 100644 index 79fc6436..00000000 --- a/apps/user-service/src/main/java/site/icebang/global/config/scheduler/SchedulerConfig.java +++ /dev/null @@ -1,28 +0,0 @@ -package site.icebang.global.config.scheduler; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.scheduling.TaskScheduler; -import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; - -/** 동적 스케줄링을 위한 TaskScheduler Bean을 설정하는 클래스 */ -@Configuration -public class SchedulerConfig { - - @Bean - public TaskScheduler taskScheduler() { - // ThreadPool 기반의 TaskScheduler를 생성합니다. - ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler(); - - // 스케줄러가 사용할 스레드 풀의 크기를 설정합니다. - // 동시에 실행될 수 있는 스케줄 작업의 최대 개수입니다. - scheduler.setPoolSize(10); - - // 스레드 이름의 접두사를 설정하여 로그 추적을 용이하게 합니다. - scheduler.setThreadNamePrefix("dynamic-scheduler-"); - - // 스케줄러를 초기화합니다. 
- scheduler.initialize(); - return scheduler; - } -} diff --git a/apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java b/apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java deleted file mode 100644 index b1a92f1e..00000000 --- a/apps/user-service/src/main/java/site/icebang/schedule/mapper/ScheduleMapper.java +++ /dev/null @@ -1,12 +0,0 @@ -package site.icebang.schedule.mapper; - -import java.util.List; - -import org.apache.ibatis.annotations.Mapper; - -import site.icebang.schedule.model.Schedule; - -@Mapper -public interface ScheduleMapper { - List findAllByIsActive(boolean isActive); -} diff --git a/apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java deleted file mode 100644 index ced2900c..00000000 --- a/apps/user-service/src/main/java/site/icebang/schedule/model/Schedule.java +++ /dev/null @@ -1,14 +0,0 @@ -package site.icebang.schedule.model; - -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -public class Schedule { - private Long scheduleId; - private Long workflowId; - private String cronExpression; - private boolean isActive; - // ... 
기타 필요한 컬럼 -} diff --git a/apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java deleted file mode 100644 index ee8580dd..00000000 --- a/apps/user-service/src/main/java/site/icebang/schedule/runner/SchedulerInitializer.java +++ /dev/null @@ -1,31 +0,0 @@ -package site.icebang.schedule.runner; - -import java.util.List; - -import org.springframework.boot.ApplicationArguments; -import org.springframework.boot.ApplicationRunner; -import org.springframework.stereotype.Component; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.schedule.mapper.ScheduleMapper; -import site.icebang.schedule.model.Schedule; -import site.icebang.schedule.service.DynamicSchedulerService; - -@Slf4j -@Component -@RequiredArgsConstructor -public class SchedulerInitializer implements ApplicationRunner { - - private final ScheduleMapper scheduleMapper; - private final DynamicSchedulerService dynamicSchedulerService; - - @Override - public void run(ApplicationArguments args) { - log.info(">>>> Initializing schedules from database..."); - List activeSchedules = scheduleMapper.findAllByIsActive(true); - activeSchedules.forEach(dynamicSchedulerService::register); - log.info(">>>> {} active schedules have been registered.", activeSchedules.size()); - } -} diff --git a/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java b/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java deleted file mode 100644 index b78c048e..00000000 --- a/apps/user-service/src/main/java/site/icebang/schedule/service/DynamicSchedulerService.java +++ /dev/null @@ -1,63 +0,0 @@ -package site.icebang.schedule.service; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ScheduledFuture; - -import 
org.springframework.batch.core.launch.JobLauncher; -import org.springframework.context.ApplicationContext; -import org.springframework.scheduling.TaskScheduler; -import org.springframework.stereotype.Service; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import site.icebang.schedule.model.Schedule; - -@Slf4j -@Service -@RequiredArgsConstructor -public class DynamicSchedulerService { - - private final TaskScheduler taskScheduler; - private final JobLauncher jobLauncher; - private final ApplicationContext applicationContext; - private final Map> scheduledTasks = new ConcurrentHashMap<>(); - - public void register(Schedule schedule) { - // TODO: schedule.getWorkflowId()를 기반으로 실행할 Job의 이름을 DB에서 조회 - // String jobName = "blogContentJob"; // 예시 - // Job jobToRun = applicationContext.getBean(jobName, Job.class); - // - // Runnable runnable = - // () -> { - // try { - // JobParametersBuilder paramsBuilder = new JobParametersBuilder(); - // paramsBuilder.addString("runAt", LocalDateTime.now().toString()); - // paramsBuilder.addLong("scheduleId", schedule.getScheduleId()); - // jobLauncher.run(jobToRun, paramsBuilder.toJobParameters()); - // } catch (Exception e) { - // log.error( - // "Failed to run scheduled job for scheduleId: {}", schedule.getScheduleId(), - // e); - // } - // }; - // - // CronTrigger trigger = new CronTrigger(schedule.getCronExpression()); - // ScheduledFuture future = taskScheduler.schedule(runnable, trigger); - // scheduledTasks.put(schedule.getScheduleId(), future); - // log.info( - // ">>>> Schedule registered: id={}, cron={}", - // schedule.getScheduleId(), - // schedule.getCronExpression()); - } - - public void remove(Long scheduleId) { - ScheduledFuture future = scheduledTasks.get(scheduleId); - if (future != null) { - future.cancel(true); - scheduledTasks.remove(scheduleId); - log.info(">>>> Schedule removed: id={}", scheduleId); - } - } -} diff --git 
a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index 8c24f49d..336d62ae 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -36,6 +36,20 @@ spring: - classpath:sql/03-insert-workflow.sql encoding: UTF-8 +# # Spring Quartz 스케줄러 설정 +# quartz: +# job-store-type: jdbc +# auto-startup: true +# jdbc: +# initialize-schema: embedded # 운영 환경을 기준으로 기본값 설정 +# properties: +# org.quartz.scheduler.instanceId: AUTO +# org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX +# org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate +# org.quartz.jobStore.tablePrefix: QRTZ_ # Quartz 테이블 접두사 +# org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool +# org.quartz.threadPool.threadCount: 5 # 개발 환경 스레드 수 + mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml type-aliases-package: site.icebang.dto diff --git a/apps/user-service/src/main/resources/application-production.yml b/apps/user-service/src/main/resources/application-production.yml index 6b048fbd..032954ad 100644 --- a/apps/user-service/src/main/resources/application-production.yml +++ b/apps/user-service/src/main/resources/application-production.yml @@ -17,6 +17,10 @@ spring: minimum-idle: 5 pool-name: HikariCP-MyBatis +# quartz: +# jdbc: +# initialize-schema: never + mybatis: mapper-locations: classpath:mybatis/mapper/**/*.xml type-aliases-package: site.icebang.dto diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index 7ede99ae..706eceea 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -7,6 +7,7 @@ spring: context: cache: maxSize: 1 + mybatis: # Mapper XML 파일 위치 mapper-locations: classpath:mapper/**/*.xml diff --git 
a/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml new file mode 100644 index 00000000..54e29ae4 --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml new file mode 100644 index 00000000..4fd0ea3d --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + INSERT INTO job_run (workflow_run_id, job_id, status, started_at, created_at) + VALUES (#{workflowRunId}, #{jobId}, #{status}, #{startedAt}, #{createdAt}) + + + + UPDATE job_run + SET status = #{status}, + finished_at = #{finishedAt} + WHERE id = #{id} + + + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml index f9629b8a..2a5480e3 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml @@ -1,17 +1,11 @@ - + - + + + - + SELECT * FROM schedule WHERE is_active = true - \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml new file mode 100644 index 00000000..7604cb94 --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/TaskMapper.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml new file mode 100644 index 
00000000..582af278 --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml @@ -0,0 +1,17 @@ + + + + + + INSERT INTO task_run (job_run_id, task_id, status, started_at, created_at) + VALUES (#{jobRunId}, #{taskId}, #{status}, #{startedAt}, #{createdAt}) + + + + UPDATE task_run + SET status = #{status}, + finished_at = #{finishedAt}, + result_message = #{resultMessage} + WHERE id = #{id} + + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml new file mode 100644 index 00000000..d10c487a --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml new file mode 100644 index 00000000..224abd02 --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + INSERT INTO workflow_run (workflow_id, trace_id, status, started_at, created_at) + VALUES (#{workflowId}, #{traceId}, #{status}, #{startedAt}, #{createdAt}) + + + + UPDATE workflow_run + SET status = #{status}, + finished_at = #{finishedAt} + WHERE id = #{id} + + + \ No newline at end of file From 65adfcadab70037cfb3668eafdbb2d6f4f302f23 Mon Sep 17 00:00:00 2001 From: kakusiA Date: Wed, 17 Sep 2025 14:48:14 +0900 Subject: [PATCH 50/57] =?UTF-8?q?feat:=20RDB,Loki=20=EB=A1=9C=EA=B7=B8=20?= =?UTF-8?q?=EC=A0=80=EC=9E=A5=20logic=20=EA=B0=9C=EB=B0=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/middleware/ServiceLoggerMiddleware.py | 116 +++- .../app/middleware/loki_logger.py | 195 ++++++ .../app/middleware/rds_logger.py | 153 +++++ .../app/model/execution_log.py | 
79 +++ apps/pre-processing-service/poetry.lock | 601 +++++++++++++++++- apps/pre-processing-service/pyproject.toml | 1 + 6 files changed, 1141 insertions(+), 4 deletions(-) create mode 100644 apps/pre-processing-service/app/middleware/loki_logger.py create mode 100644 apps/pre-processing-service/app/middleware/rds_logger.py create mode 100644 apps/pre-processing-service/app/model/execution_log.py diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py index acb120fa..03dd986b 100644 --- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py +++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py @@ -8,6 +8,10 @@ import json import time +import asyncio + +from app.middleware.rds_logger import RDSLogger +from app.middleware.loki_logger import LokiLogger trace_id_context: ContextVar[str] = ContextVar("trace_id", default="NO_TRACE_ID") @@ -18,9 +22,11 @@ class ServiceLoggerMiddleware(BaseHTTPMiddleware): URL 패턴을 기반으로 자동으로 서비스 타입 식별 및 로깅 """ - def __init__(self, app, service_mappings: Dict[str, Dict] = None): + def __init__(self, app, service_mappings: Dict[str, Dict] = None, enable_rds: bool = True, enable_loki: bool = True): """ :param service_mappings: URL 패턴별 서비스 설정 + :param enable_rds: RDS 로깅 활성화 여부 + :param enable_loki: Loki 로깅 활성화 여부 예: { "/keywords/search": { "service_type": "NAVER_CRAWLING", @@ -31,6 +37,12 @@ def __init__(self, app, service_mappings: Dict[str, Dict] = None): """ super().__init__(app) self.service_mappings = service_mappings or self._default_mappings() + self.enable_rds = enable_rds + self.enable_loki = enable_loki + + # 로거 인스턴스 초기화 + self.rds_logger = RDSLogger() if enable_rds else None + self.loki_logger = LokiLogger() if enable_loki else None def _default_mappings(self) -> Dict[str, Dict]: """기본 서비스 매핑 설정""" @@ -78,7 +90,8 @@ async def dispatch(self, request: Request, call_next): return await 
call_next(request) # 2. 시작 로깅 - trace_id = trace_id_context.get("NO_TRACE_ID") + trace_id = request.headers.get("X-Request-ID", "NO_TRACE_ID") + trace_id_context.set(trace_id) start_time = time.time() # 파라미터 추출 및 시작 로그 @@ -91,11 +104,23 @@ async def dispatch(self, request: Request, call_next): service_type = service_config["service_type"] logger.info(f"[{service_type}_START] trace_id={trace_id}{param_str}") + # source_id 추출 (job_id, schedule_id 등에서) + source_id = self._extract_source_id(params) + run_id = params.get("run_id") + + # RDS 및 Loki에 시작 로그 전송 + start_message = f"[{service_type}_START]{param_str}" + await self._log_to_external_systems( + "start", service_type, source_id, trace_id, start_message, run_id, params + ) + # 3. 요청 처리 try: response = await call_next(request) # 4. 성공 로깅 + duration_ms = int((time.time() - start_time) * 1000) + if 200 <= response.status_code < 300: await self._log_success_response( service_type, @@ -105,16 +130,38 @@ async def dispatch(self, request: Request, call_next): response, service_config["response_trackers"], ) + + # 외부 로깅 시스템에 성공 로그 전송 + success_message = f"[{service_type}_SUCCESS]{param_str} status_code={response.status_code}" + await self._log_to_external_systems( + "success", service_type, source_id, trace_id, success_message, + run_id, params, duration_ms=duration_ms + ) else: await self._log_error_response( service_type, trace_id, start_time, param_str, response ) + # 외부 로깅 시스템에 에러 로그 전송 + error_message = f"[{service_type}_ERROR]{param_str} status_code={response.status_code}" + await self._log_to_external_systems( + "error", service_type, source_id, trace_id, error_message, + run_id, params, duration_ms=duration_ms, error_code=f"HTTP_{response.status_code}" + ) + return response except Exception as e: # 5. 
예외 로깅 + duration_ms = int((time.time() - start_time) * 1000) await self._log_exception(service_type, trace_id, start_time, param_str, e) + + # 외부 로깅 시스템에 예외 로그 전송 + exception_message = f"[{service_type}_EXCEPTION]{param_str} exception={str(e)}" + await self._log_to_external_systems( + "error", service_type, source_id, trace_id, exception_message, + run_id, params, duration_ms=duration_ms, error_code="EXCEPTION" + ) raise def _get_service_config(self, url_path: str) -> Optional[Dict]: @@ -248,3 +295,68 @@ async def _log_exception( f"execution_time={duration:.4f}s{param_str} " f"exception={str(exception)}" ) + + def _extract_source_id(self, params: Dict[str, Any]) -> int: + """파라미터에서 source_id 추출 (job_id, schedule_id 등 우선순위)""" + for key in ["job_id", "schedule_id", "task_id", "workflow_id"]: + if key in params and params[key]: + try: + return int(params[key]) + except (ValueError, TypeError): + continue + return 0 # 기본값 + + async def _log_to_external_systems( + self, + log_type: str, # start, success, error + service_type: str, + source_id: int, + trace_id: str, + message: str, + run_id: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + duration_ms: Optional[int] = None, + error_code: Optional[str] = None + ): + """RDS와 Loki에 로그 전송""" + tasks = [] + + # 로깅할 추가 데이터 준비 + additional_data = params.copy() if params else {} + + if self.rds_logger: + if log_type == "start": + task = self.rds_logger.log_start( + service_type, source_id, trace_id, message, run_id, additional_data + ) + elif log_type == "success": + task = self.rds_logger.log_success( + service_type, source_id, trace_id, message, duration_ms, run_id, additional_data + ) + elif log_type == "error": + task = self.rds_logger.log_error( + service_type, source_id, trace_id, message, error_code, duration_ms, run_id, additional_data + ) + tasks.append(task) + + if self.loki_logger: + if log_type == "start": + task = self.loki_logger.log_start( + service_type, source_id, trace_id, message, run_id, 
additional_data + ) + elif log_type == "success": + task = self.loki_logger.log_success( + service_type, source_id, trace_id, message, duration_ms, run_id, additional_data + ) + elif log_type == "error": + task = self.loki_logger.log_error( + service_type, source_id, trace_id, message, error_code, duration_ms, run_id, additional_data + ) + tasks.append(task) + + # 비동기로 병렬 실행 (로깅 실패가 메인 로직에 영향을 주지 않도록) + if tasks: + try: + await asyncio.gather(*tasks, return_exceptions=True) + except Exception as e: + logger.debug(f"외부 로깅 시스템 전송 중 일부 실패: {e}") diff --git a/apps/pre-processing-service/app/middleware/loki_logger.py b/apps/pre-processing-service/app/middleware/loki_logger.py new file mode 100644 index 00000000..f4471a4c --- /dev/null +++ b/apps/pre-processing-service/app/middleware/loki_logger.py @@ -0,0 +1,195 @@ +import json +import aiohttp +import asyncio +from typing import Dict, List, Any, Optional +from datetime import datetime +from loguru import logger + +from app.model.execution_log import ExecutionLog + + +class LokiLogger: + """Loki에 로그를 전송하는 클래스""" + + def __init__(self, loki_host: str = "localhost", loki_port: int = 3100, app_name: str = "pre-processing-service"): + self.loki_url = f"http://{loki_host}:{loki_port}/loki/api/v1/push" + self.app_name = app_name + self.session = None + + async def _get_session(self) -> aiohttp.ClientSession: + """aiohttp 세션 관리""" + if self.session is None or self.session.closed: + self.session = aiohttp.ClientSession() + return self.session + + async def close(self): + """세션 종료""" + if self.session and not self.session.closed: + await self.session.close() + + async def send_log( + self, + execution_type: str, + source_id: int, + log_level: str, + log_message: str, + trace_id: Optional[str] = None, + run_id: Optional[int] = None, + status: Optional[str] = None, + duration_ms: Optional[int] = None, + error_code: Optional[str] = None, + additional_data: Optional[dict] = None + ) -> bool: + """ + Loki로 로그 전송 + + Args: + 
execution_type: task, schedule, job, workflow + source_id: 모든 데이터에 대한 ID + log_level: INFO, ERROR, WARNING, DEBUG + log_message: 로그 메시지 + trace_id: 추적 ID + run_id: 실행 ID + status: SUCCESS, ERROR, RUNNING, PENDING + duration_ms: 실행 시간(밀리초) + error_code: 에러 코드 + additional_data: 추가 데이터 + + Returns: + bool: 전송 성공 여부 + """ + try: + execution_log = ExecutionLog( + execution_type=execution_type, + source_id=source_id, + log_level=log_level, + executed_at=datetime.now(), + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status=status, + duration_ms=duration_ms, + error_code=error_code, + reserved4=additional_data + ) + + loki_data = execution_log.to_loki_format(self.app_name) + + # Loki push API 형식으로 변환 + payload = { + "streams": [ + { + "stream": loki_data["labels"], + "values": [ + [ + str(loki_data["log"]["timestamp"]), + json.dumps(loki_data["log"], ensure_ascii=False) + ] + ] + } + ] + } + + session = await self._get_session() + + async with session.post( + self.loki_url, + json=payload, + headers={"Content-Type": "application/json"}, + timeout=aiohttp.ClientTimeout(total=5) + ) as response: + if response.status == 204: + logger.debug(f"Loki 로그 전송 성공: {execution_type} - {log_message[:50]}...") + return True + else: + response_text = await response.text() + logger.error(f"Loki 로그 전송 실패: status={response.status}, response={response_text}") + return False + + except asyncio.TimeoutError: + logger.error("Loki 로그 전송 타임아웃") + return False + except Exception as e: + logger.error(f"Loki 로그 전송 실패: {str(e)}") + return False + + async def log_start( + self, + execution_type: str, + source_id: int, + trace_id: str, + log_message: str, + run_id: Optional[int] = None, + additional_data: Optional[dict] = None + ) -> bool: + """시작 로그 전송""" + return await self.send_log( + execution_type=execution_type, + source_id=source_id, + log_level="INFO", + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status="RUNNING", + additional_data=additional_data + ) + 
+ async def log_success( + self, + execution_type: str, + source_id: int, + trace_id: str, + log_message: str, + duration_ms: int, + run_id: Optional[int] = None, + additional_data: Optional[dict] = None + ) -> bool: + """성공 로그 전송""" + return await self.send_log( + execution_type=execution_type, + source_id=source_id, + log_level="INFO", + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status="SUCCESS", + duration_ms=duration_ms, + additional_data=additional_data + ) + + async def log_error( + self, + execution_type: str, + source_id: int, + trace_id: str, + log_message: str, + error_code: str, + duration_ms: Optional[int] = None, + run_id: Optional[int] = None, + additional_data: Optional[dict] = None + ) -> bool: + """에러 로그 전송""" + return await self.send_log( + execution_type=execution_type, + source_id=source_id, + log_level="ERROR", + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status="ERROR", + duration_ms=duration_ms, + error_code=error_code, + additional_data=additional_data + ) + + def __del__(self): + """소멸자에서 세션 정리""" + if self.session and not self.session.closed: + try: + loop = asyncio.get_event_loop() + if loop.is_running(): + loop.create_task(self.session.close()) + else: + loop.run_until_complete(self.session.close()) + except: + pass \ No newline at end of file diff --git a/apps/pre-processing-service/app/middleware/rds_logger.py b/apps/pre-processing-service/app/middleware/rds_logger.py new file mode 100644 index 00000000..76611c04 --- /dev/null +++ b/apps/pre-processing-service/app/middleware/rds_logger.py @@ -0,0 +1,153 @@ +from typing import Optional +from datetime import datetime +import traceback +from loguru import logger + +from app.db.mariadb_manager import MariadbManager +from app.model.execution_log import ExecutionLog + + +class RDSLogger: + """RDS(MariaDB)에 로그를 저장하는 클래스""" + + def __init__(self): + self.db_manager = MariadbManager() + + async def log_execution( + self, + execution_type: str, + 
source_id: int, + log_level: str, + log_message: str, + trace_id: Optional[str] = None, + run_id: Optional[int] = None, + status: Optional[str] = None, + duration_ms: Optional[int] = None, + error_code: Optional[str] = None, + additional_data: Optional[dict] = None + ) -> bool: + """ + execution_log 테이블에 로그 저장 + + Args: + execution_type: task, schedule, job, workflow + source_id: 모든 데이터에 대한 ID + log_level: INFO, ERROR, WARNING, DEBUG + log_message: 로그 메시지 + trace_id: 추적 ID + run_id: 실행 ID + status: SUCCESS, ERROR, RUNNING, PENDING + duration_ms: 실행 시간(밀리초) + error_code: 에러 코드 + additional_data: 추가 데이터 (reserved4에 JSON으로 저장) + + Returns: + bool: 저장 성공 여부 + """ + try: + execution_log = ExecutionLog( + execution_type=execution_type, + source_id=source_id, + log_level=log_level, + executed_at=datetime.now(), + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status=status, + duration_ms=duration_ms, + error_code=error_code, + reserved4=additional_data + ) + + log_data = execution_log.to_dict() + + # 컬럼명과 값 분리 + columns = list(log_data.keys()) + values = list(log_data.values()) + placeholders = ', '.join(['%s'] * len(values)) + columns_str = ', '.join(columns) + + insert_query = f""" + INSERT INTO execution_log ({columns_str}) + VALUES ({placeholders}) + """ + + with self.db_manager.get_cursor() as cursor: + cursor.execute(insert_query, values) + + logger.debug(f"RDS 로그 저장 성공: {execution_type} - {log_message[:50]}...") + return True + + except Exception as e: + logger.error(f"RDS 로그 저장 실패: {str(e)}") + logger.error(f"Traceback: {traceback.format_exc()}") + return False + + async def log_start( + self, + execution_type: str, + source_id: int, + trace_id: str, + log_message: str, + run_id: Optional[int] = None, + additional_data: Optional[dict] = None + ) -> bool: + """시작 로그 저장""" + return await self.log_execution( + execution_type=execution_type, + source_id=source_id, + log_level="INFO", + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + 
status="RUNNING", + additional_data=additional_data + ) + + async def log_success( + self, + execution_type: str, + source_id: int, + trace_id: str, + log_message: str, + duration_ms: int, + run_id: Optional[int] = None, + additional_data: Optional[dict] = None + ) -> bool: + """성공 로그 저장""" + return await self.log_execution( + execution_type=execution_type, + source_id=source_id, + log_level="INFO", + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status="SUCCESS", + duration_ms=duration_ms, + additional_data=additional_data + ) + + async def log_error( + self, + execution_type: str, + source_id: int, + trace_id: str, + log_message: str, + error_code: str, + duration_ms: Optional[int] = None, + run_id: Optional[int] = None, + additional_data: Optional[dict] = None + ) -> bool: + """에러 로그 저장""" + return await self.log_execution( + execution_type=execution_type, + source_id=source_id, + log_level="ERROR", + log_message=log_message, + trace_id=trace_id, + run_id=run_id, + status="ERROR", + duration_ms=duration_ms, + error_code=error_code, + additional_data=additional_data + ) \ No newline at end of file diff --git a/apps/pre-processing-service/app/model/execution_log.py b/apps/pre-processing-service/app/model/execution_log.py new file mode 100644 index 00000000..30c654b8 --- /dev/null +++ b/apps/pre-processing-service/app/model/execution_log.py @@ -0,0 +1,79 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Optional, Dict, Any +import json + + +@dataclass +class ExecutionLog: + """execution_log 테이블에 대응하는 데이터 모델""" + + execution_type: str # task, schedule, job, workflow + source_id: int # 모든 데이터에 대한 ID + log_level: str # INFO, ERROR, WARNING, DEBUG + executed_at: datetime + log_message: str + trace_id: Optional[str] = None + run_id: Optional[int] = None + status: Optional[str] = None # SUCCESS, ERROR, RUNNING, PENDING + duration_ms: Optional[int] = None + error_code: Optional[str] = None + reserved1: 
Optional[str] = None + reserved2: Optional[str] = None + reserved3: Optional[int] = None + reserved4: Optional[Dict[str, Any]] = None # JSON 데이터 + reserved5: Optional[datetime] = None + id: Optional[int] = None # auto_increment + + def to_dict(self) -> Dict[str, Any]: + """딕셔너리로 변환 (DB 삽입용)""" + data = { + 'execution_type': self.execution_type, + 'source_id': self.source_id, + 'log_level': self.log_level, + 'executed_at': self.executed_at, + 'log_message': self.log_message, + 'trace_id': self.trace_id, + 'run_id': self.run_id, + 'status': self.status, + 'duration_ms': self.duration_ms, + 'error_code': self.error_code, + 'reserved1': self.reserved1, + 'reserved2': self.reserved2, + 'reserved3': self.reserved3, + 'reserved4': json.dumps(self.reserved4) if self.reserved4 else None, + 'reserved5': self.reserved5 + } + return {k: v for k, v in data.items() if v is not None} + + def to_loki_format(self, app_name: str = "pre-processing-service") -> Dict[str, Any]: + """Loki 형식으로 변환""" + + labels = { + "app": app_name, + "env": "develop", + "traceId": self.trace_id or "NO_TRACE_ID", + "spanId": "", # 필요시 추가 + "executionType": self.execution_type, + "sourceId": str(self.source_id), + "runId": str(self.run_id) if self.run_id else "" + } + + log_data = { + "timestamp": int(self.executed_at.timestamp() * 1000000000), # nanoseconds + "level": self.log_level, + "message": self.log_message, + "execution_type": self.execution_type, + "source_id": self.source_id, + "status": self.status, + "duration_ms": self.duration_ms, + "error_code": self.error_code + } + + if self.reserved4: + log_data.update(self.reserved4) + + return { + "labels": labels, + "log": log_data + } \ No newline at end of file diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index bb05b60a..04b147b0 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -1,4 +1,140 @@ -# This file is automatically @generated by Poetry 
2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.12.15" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc"}, + {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af"}, + {file = "aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4"}, + {file = 
"aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1"}, + {file = "aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a"}, + {file = "aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830"}, + {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117"}, + {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe"}, + {file = "aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9"}, + {file = 
"aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685"}, + {file = 
"aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b"}, + {file = "aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d"}, + {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7"}, + {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444"}, + {file = "aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd"}, + {file = 
"aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3"}, + {file = "aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1"}, + {file = "aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34"}, + {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315"}, + {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd"}, + {file = "aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b"}, + {file = 
"aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51"}, + {file = "aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0"}, + {file = "aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84"}, + {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98"}, + {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406"}, + {file 
= "aiohttp-3.12.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0"}, + {file = 
"aiohttp-3.12.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09"}, + {file = "aiohttp-3.12.15-cp39-cp39-win32.whl", hash = "sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d"}, + {file = "aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8"}, + {file = "aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.5.0" +aiosignal = ">=1.4.0" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.4.0" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, + {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" +typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} [[package]] name = "annotated-types" @@ -531,6 +667,120 @@ files = [ {file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"}, ] +[[package]] +name = "frozenlist" +version = "1.7.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}, + {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}, + {file = "frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"}, + {file = 
"frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"}, + {file = "frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}, + {file = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"}, + {file = "frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"}, + {file = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}, + {file = "frozenlist-1.7.0-cp312-cp312-win32.whl", hash = 
"sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}, + {file = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b"}, + {file = 
"frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e"}, + {file = "frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1"}, + {file = "frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf"}, + {file = "frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81"}, + {file = "frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"}, + 
{file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"}, + 
{file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"}, + {file = "frozenlist-1.7.0-cp39-cp39-win32.whl", hash = "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"}, + {file = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"}, + {file = "frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}, + {file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"}, +] + [[package]] name = "fsspec" version = "2025.9.0" @@ -1171,6 +1421,126 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] tests = ["pytest (>=4.6)"] +[[package]] +name = "multidict" +version = "6.6.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f"}, + {file = "multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb"}, + {file = "multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495"}, + {file = "multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8"}, + {file = "multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7"}, + {file = "multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796"}, + {file = "multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db"}, + {file = "multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0"}, + {file = "multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877"}, + {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace"}, + {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6"}, + {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb"}, + {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb"}, + {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987"}, + {file = "multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f"}, + {file = "multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f"}, + {file = "multidict-6.6.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0"}, + {file = "multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729"}, + {file = "multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c"}, + {file = "multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb"}, + {file = "multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e"}, + {file = "multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded"}, + {file = "multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683"}, + {file = "multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a"}, + {file = "multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9"}, + {file = "multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50"}, + {file = "multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52"}, + {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6"}, + {file = 
"multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e"}, + {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3"}, + {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c"}, + {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b"}, + {file = "multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f"}, + {file = "multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2"}, + {file = "multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e"}, + {file = "multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf"}, + {file = "multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8"}, + {file = "multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3"}, + {file = "multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b"}, + {file = "multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287"}, + {file = "multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138"}, + {file = 
"multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6"}, + {file = "multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9"}, + {file = "multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c"}, + {file = "multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402"}, + {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7"}, + {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f"}, + {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d"}, + {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7"}, + {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802"}, + {file = "multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24"}, + {file = "multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793"}, + {file = "multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e"}, + {file = "multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = 
"sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364"}, + {file = "multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e"}, + {file = "multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657"}, + {file = "multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da"}, + {file = "multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa"}, + {file = "multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f"}, + {file = "multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0"}, + {file = "multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879"}, + {file = "multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a"}, + {file = "multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f"}, + {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5"}, + {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438"}, + {file 
= "multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e"}, + {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7"}, + {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812"}, + {file = "multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a"}, + {file = "multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69"}, + {file = "multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf"}, + {file = "multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605"}, + {file = "multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb"}, + {file = "multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e"}, + {file = "multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f"}, + {file = "multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773"}, + {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e"}, + {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0"}, + {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395"}, + {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45"}, + {file = "multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb"}, + {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5"}, + {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141"}, + {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d"}, + {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d"}, + {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0"}, + {file = "multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92"}, + {file = "multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e"}, + {file = "multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4"}, + {file = "multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad"}, + {file = 
"multidict-6.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:af7618b591bae552b40dbb6f93f5518328a949dac626ee75927bba1ecdeea9f4"}, + {file = "multidict-6.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b6819f83aef06f560cb15482d619d0e623ce9bf155115150a85ab11b8342a665"}, + {file = "multidict-6.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d09384e75788861e046330308e7af54dd306aaf20eb760eb1d0de26b2bea2cb"}, + {file = "multidict-6.6.4-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:a59c63061f1a07b861c004e53869eb1211ffd1a4acbca330e3322efa6dd02978"}, + {file = "multidict-6.6.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350f6b0fe1ced61e778037fdc7613f4051c8baf64b1ee19371b42a3acdb016a0"}, + {file = "multidict-6.6.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c5cbac6b55ad69cb6aa17ee9343dfbba903118fd530348c330211dc7aa756d1"}, + {file = "multidict-6.6.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:630f70c32b8066ddfd920350bc236225814ad94dfa493fe1910ee17fe4365cbb"}, + {file = "multidict-6.6.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8d4916a81697faec6cb724a273bd5457e4c6c43d82b29f9dc02c5542fd21fc9"}, + {file = "multidict-6.6.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e42332cf8276bb7645d310cdecca93a16920256a5b01bebf747365f86a1675b"}, + {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f3be27440f7644ab9a13a6fc86f09cdd90b347c3c5e30c6d6d860de822d7cb53"}, + {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:21f216669109e02ef3e2415ede07f4f8987f00de8cdfa0cc0b3440d42534f9f0"}, + {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:d9890d68c45d1aeac5178ded1d1cccf3bc8d7accf1f976f79bf63099fb16e4bd"}, + {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:edfdcae97cdc5d1a89477c436b61f472c4d40971774ac4729c613b4b133163cb"}, + {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0b2e886624be5773e69cf32bcb8534aecdeb38943520b240fed3d5596a430f2f"}, + {file = "multidict-6.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:be5bf4b3224948032a845d12ab0f69f208293742df96dc14c4ff9b09e508fc17"}, + {file = "multidict-6.6.4-cp39-cp39-win32.whl", hash = "sha256:10a68a9191f284fe9d501fef4efe93226e74df92ce7a24e301371293bd4918ae"}, + {file = "multidict-6.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee25f82f53262f9ac93bd7e58e47ea1bdcc3393cef815847e397cba17e284210"}, + {file = "multidict-6.6.4-cp39-cp39-win_arm64.whl", hash = "sha256:f9867e55590e0855bcec60d4f9a092b69476db64573c9fe17e92b0c50614c16a"}, + {file = "multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c"}, + {file = "multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd"}, +] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -1432,6 +1802,114 @@ files = [ {file = "poetry_core-2.2.0.tar.gz", hash = "sha256:b4033b71b99717a942030e074fec7e3082e5fde7a8ed10f02cd2413bdf940b1f"}, ] +[[package]] +name = "propcache" +version = "0.3.2" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"}, 
+ {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"}, + {file = "propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}, + {file = "propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"}, + {file = "propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"}, + {file = "propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b"}, + {file = 
"propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}, + {file = "propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}, + {file = "propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e"}, + {file = 
"propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43"}, + {file = "propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02"}, + {file = "propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0"}, 
+ {file = "propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330"}, + {file = "propcache-0.3.2-cp313-cp313t-win32.whl", hash = 
"sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394"}, + {file = "propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"}, + {file = 
"propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"}, + {file = "propcache-0.3.2-cp39-cp39-win32.whl", hash = "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"}, + {file = "propcache-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"}, + {file = "propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}, + {file = "propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"}, +] + [[package]] name = "proto-plus" version = "1.26.1" @@ -2753,7 +3231,126 @@ files = [ [package.dependencies] h11 = ">=0.9.0,<1" +[[package]] +name = "yarl" +version = "1.20.1" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}, + {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}, + {file = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"}, + {file = "yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}, + {file = "yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"}, + 
{file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"}, + {file = "yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"}, + {file = "yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}, + {file = "yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}, + {file = "yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1"}, + {file = "yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7"}, + {file = "yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = 
"sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e"}, + {file = "yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d"}, + {file = "yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"}, + {file = 
"yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"}, + {file = "yarl-1.20.1-cp39-cp39-win32.whl", hash = "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"}, + {file = "yarl-1.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"}, + {file = "yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}, + {file = "yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "d02c427c36a8894fe681d04a4c0c45de45d5793a4d264375992ce1104096f620" +content-hash = "6e10697924e89b5c0f7c3f6ecd79fb64ac412ac2240f75565d6ea5feb3a89a20" diff --git a/apps/pre-processing-service/pyproject.toml b/apps/pre-processing-service/pyproject.toml index c724ee24..672bf645 100644 
--- a/apps/pre-processing-service/pyproject.toml +++ b/apps/pre-processing-service/pyproject.toml @@ -35,6 +35,7 @@ poetry-core=">=2.1.3,<3.0.0" dbutils=">=3.1.2,<4.0.0" onnxruntime = "^1.22.1" openai = "^1.107.3" +aiohttp = "^3.12.15" [build-system] requires = ["poetry-core>=2.0.0,<3.0.0"] From d45860d61d265d560eca7ea090d2c8de9ba9cd9c Mon Sep 17 00:00:00 2001 From: kakusiA Date: Wed, 17 Sep 2025 16:30:15 +0900 Subject: [PATCH 51/57] =?UTF-8?q?refactor:=20loging=20=EC=93=B8=EB=AA=A8?= =?UTF-8?q?=EC=97=86=EB=8A=94=20=EB=A1=9C=EA=B7=B8=20=EC=A3=BC=EC=84=9D?= =?UTF-8?q?=EB=B0=8F=20=ED=95=98=EB=93=9C=EC=BD=94=EB=94=A9=20=EC=82=AD?= =?UTF-8?q?=EC=A0=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/pre-processing-service/app/core/config.py | 5 +++++ .../pre-processing-service/app/middleware/loki_logger.py | 9 +++++---- apps/pre-processing-service/app/middleware/rds_logger.py | 2 +- apps/pre-processing-service/app/model/execution_log.py | 5 +++-- 4 files changed, 14 insertions(+), 7 deletions(-) diff --git a/apps/pre-processing-service/app/core/config.py b/apps/pre-processing-service/app/core/config.py index 69e29d35..2de3833a 100644 --- a/apps/pre-processing-service/app/core/config.py +++ b/apps/pre-processing-service/app/core/config.py @@ -76,10 +76,15 @@ class BaseSettingsConfig(BaseSettings): db_pass: str db_name: str env_name: str + app_name: str # MeCab 사전 경로 (자동 감지) mecab_path: Optional[str] = None + # Loki 설정 + loki_host: str = "localhost" + loki_port: int = 3100 + # 테스트/추가용 필드 openai_api_key: Optional[str] = None # << 이 부분 추가 diff --git a/apps/pre-processing-service/app/middleware/loki_logger.py b/apps/pre-processing-service/app/middleware/loki_logger.py index f4471a4c..7870c563 100644 --- a/apps/pre-processing-service/app/middleware/loki_logger.py +++ b/apps/pre-processing-service/app/middleware/loki_logger.py @@ -6,14 +6,15 @@ from loguru import logger from app.model.execution_log import ExecutionLog +from 
app.core.config import settings class LokiLogger: """Loki에 로그를 전송하는 클래스""" - def __init__(self, loki_host: str = "localhost", loki_port: int = 3100, app_name: str = "pre-processing-service"): - self.loki_url = f"http://{loki_host}:{loki_port}/loki/api/v1/push" - self.app_name = app_name + def __init__(self): + self.loki_url = f"{settings.loki_host}:{settings.loki_port}/loki/api/v1/push" + self.app_name = settings.app_name self.session = None async def _get_session(self) -> aiohttp.ClientSession: @@ -99,7 +100,7 @@ async def send_log( timeout=aiohttp.ClientTimeout(total=5) ) as response: if response.status == 204: - logger.debug(f"Loki 로그 전송 성공: {execution_type} - {log_message[:50]}...") + # logger.debug(f"Loki 로그 전송 성공: {execution_type} - {log_message[:50]}...") return True else: response_text = await response.text() diff --git a/apps/pre-processing-service/app/middleware/rds_logger.py b/apps/pre-processing-service/app/middleware/rds_logger.py index 76611c04..f483a1fc 100644 --- a/apps/pre-processing-service/app/middleware/rds_logger.py +++ b/apps/pre-processing-service/app/middleware/rds_logger.py @@ -75,7 +75,7 @@ async def log_execution( with self.db_manager.get_cursor() as cursor: cursor.execute(insert_query, values) - logger.debug(f"RDS 로그 저장 성공: {execution_type} - {log_message[:50]}...") + # logger.debug(f"RDS 로그 저장 성공: {execution_type} - {log_message[:50]}...") return True except Exception as e: diff --git a/apps/pre-processing-service/app/model/execution_log.py b/apps/pre-processing-service/app/model/execution_log.py index 30c654b8..069ab392 100644 --- a/apps/pre-processing-service/app/model/execution_log.py +++ b/apps/pre-processing-service/app/model/execution_log.py @@ -13,6 +13,7 @@ class ExecutionLog: log_level: str # INFO, ERROR, WARNING, DEBUG executed_at: datetime log_message: str + span_id: str =""#테스트값 trace_id: Optional[str] = None run_id: Optional[int] = None status: Optional[str] = None # SUCCESS, ERROR, RUNNING, PENDING @@ -38,7 +39,7 @@ def 
to_dict(self) -> Dict[str, Any]: 'status': self.status, 'duration_ms': self.duration_ms, 'error_code': self.error_code, - 'reserved1': self.reserved1, + 'reserved1': self.span_id, 'reserved2': self.reserved2, 'reserved3': self.reserved3, 'reserved4': json.dumps(self.reserved4) if self.reserved4 else None, @@ -53,7 +54,7 @@ def to_loki_format(self, app_name: str = "pre-processing-service") -> Dict[str, "app": app_name, "env": "develop", "traceId": self.trace_id or "NO_TRACE_ID", - "spanId": "", # 필요시 추가 + "spanId": self.span_id, # 필요시 추가 "executionType": self.execution_type, "sourceId": str(self.source_id), "runId": str(self.run_id) if self.run_id else "" From 5ca1bf3aaa1811804bb3314bce072afbb7c4768a Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Wed, 17 Sep 2025 16:31:22 +0900 Subject: [PATCH 52/57] =?UTF-8?q?Spring=20boot=20Promtail=EC=9D=84=20?= =?UTF-8?q?=ED=86=B5=ED=95=B4=20Loki=20log=20=EC=A0=84=EC=86=A1=20(#115)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Loki Promtail을 통해 file 전송으로 변경 * chore: Promtail을 통해 loki 전송 (local) * chore: Exception handler error, warn logging - warn: Data duplicate - error: Exception * fix: spanid logging 추가 * chore: production grafana 설정 * chore: Promtail 복사 ci 작성 * chore: User-service promtail 이 후 container화 되도록 설정 * chore: Production, grafana 연동 세팅 --- .github/workflows/deploy-java.yml | 14 ++++ .../exception/GlobalExceptionHandler.java | 5 ++ .../src/main/resources/log4j2-develop.yml | 75 ++++++------------- .../src/main/resources/log4j2-production.yml | 11 +-- docker/local/docker-compose.yml | 26 +++++++ docker/local/promtail-config.yml | 49 ++++++++++++ docker/production/docker-compose.yml | 25 ++++++- docker/production/promtail-config.yml | 49 ++++++++++++ 8 files changed, 197 insertions(+), 57 deletions(-) create mode 100644 docker/local/promtail-config.yml create mode 100644 docker/production/promtail-config.yml diff --git a/.github/workflows/deploy-java.yml 
b/.github/workflows/deploy-java.yml index eb2865d6..d7526506 100644 --- a/.github/workflows/deploy-java.yml +++ b/.github/workflows/deploy-java.yml @@ -28,6 +28,9 @@ jobs: echo "DB_USER=${{ secrets.DB_USER }}" >> .env.prod echo "DB_PASS=${{ secrets.DB_PASS }}" >> .env.prod echo "DB_NAME=${{ secrets.DB_NAME }}" >> .env.prod + echo "ENV_NAME=${{ secrets.LOKI_URL }}" >> .env.prod + echo "ENV_NAME=${{ secrets.LOKI_USERNAME }}" >> .env.prod + echo "ENV_NAME=${{ secrets.LOKI_PASSWORD }}" >> .env.prod echo "ENV_NAME=${{ secrets.ENV_NAME }}" >> .env.prod - name: Set repo lowercase @@ -62,6 +65,17 @@ jobs: target: "~/app/docker/production/" overwrite: true + - name: Copy promtail-config to EC2 + uses: appleboy/scp-action@v0.1.7 + with: + host: ${{ secrets.SERVER_HOST }} + username: ubuntu + key: ${{ secrets.SERVER_SSH_KEY }} + source: "docker/production/promtail-config.yml" + target: "~/app/docker/production/" + overwrite: true + + - name: Deploy on EC2 uses: appleboy/ssh-action@v1.0.3 with: diff --git a/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java index 4eba15ae..8243acde 100644 --- a/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java +++ b/apps/user-service/src/main/java/site/icebang/global/handler/exception/GlobalExceptionHandler.java @@ -9,10 +9,13 @@ import org.springframework.web.bind.annotation.RestControllerAdvice; import org.springframework.web.servlet.resource.NoResourceFoundException; +import lombok.extern.slf4j.Slf4j; + import site.icebang.common.dto.ApiResponse; import site.icebang.common.exception.DuplicateDataException; @RestControllerAdvice +@Slf4j public class GlobalExceptionHandler { @ExceptionHandler(MethodArgumentNotValidException.class) @ResponseStatus(HttpStatus.BAD_REQUEST) @@ -24,6 +27,7 @@ public ApiResponse 
handleValidation(MethodArgumentNotValidException ex) @ExceptionHandler(Exception.class) @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR) public ApiResponse handleGeneric(Exception ex) { + log.error(ex.getMessage(), ex); return ApiResponse.error("Internal error: ", HttpStatus.INTERNAL_SERVER_ERROR); } @@ -48,6 +52,7 @@ public ApiResponse handleAccessDenied(AccessDeniedException ex) { @ExceptionHandler(DuplicateDataException.class) @ResponseStatus(HttpStatus.CONFLICT) public ApiResponse handleDuplicateData(DuplicateDataException ex) { + log.warn(ex.getMessage(), ex); return ApiResponse.error("Duplicate: " + ex.getMessage(), HttpStatus.CONFLICT); } } diff --git a/apps/user-service/src/main/resources/log4j2-develop.yml b/apps/user-service/src/main/resources/log4j2-develop.yml index 8e68569b..21790eea 100644 --- a/apps/user-service/src/main/resources/log4j2-develop.yml +++ b/apps/user-service/src/main/resources/log4j2-develop.yml @@ -7,16 +7,16 @@ Configuration: - name: "app-name" value: "user-service" - name: "log-path" - value: "./logs" + value: "./docker/local/logs" - name: "charset-UTF-8" value: "UTF-8" # DEBUG 환경용 콘솔 패턴 - 더 간단하고 가독성 좋게 - name: "console-layout-pattern" value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss} [%t] %n %logger{20} - %msg%n%n " - # 파일용 패턴 + # 파일용 패턴 - Promtail이 파싱하기 쉽게 구조화 (UTC 시간 사용) - name: "file-layout-pattern" - value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n" - # 개발 환경용 로그 파일들 + value: "[%X{traceId}] [%X{spanId}] %d{yyyy-MM-dd HH:mm:ss.SSS}{UTC} [%t] %-5level %logger{36} - %msg%n" + # 개발 환경용 로그 파일들 - 절대경로나 상대경로 설정 - name: "dev-log" value: ${log-path}/develop/app.log - name: "error-log" @@ -29,37 +29,8 @@ Configuration: target: SYSTEM_OUT PatternLayout: pattern: ${console-layout-pattern} -# disableAnsi: false - - # Loki Appender - 개발환경 모니터링용 - Loki: - name: loki-appender - host: localhost - port: 3100 - JsonLayout: - compact: true - eventEol: true - includeStacktrace: true - 
KeyValuePair: - - key: "app" - value: "${app-name}" - - key: "env" - value: "develop" - Label: - - name: "app" - value: "${app-name}" - - name: "env" - value: "develop" - - name: "traceId" - value: "${ctx:traceId}" - - name: "spanId" - value: "${ctx:spanId}" - - name: "executionType" - value: "${ctx:executionType:-application}" - - name: "sourceId" - value: "${ctx:sourceId}" - - name: "runId" - value: "${ctx:runId}" + + # JDBC Appender - 워크플로우 로그용 JDBC: name: workflow-appender tableName: "execution_log" @@ -80,20 +51,27 @@ Configuration: - name: "log_level" pattern: "%level" - name: "executed_at" - pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}" # 패턴으로 시간 직접 지정 + pattern: "%d{yyyy-MM-dd HH:mm:ss.SSS}" - name: "log_message" pattern: "%message" - name: "trace_id" pattern: "%X{traceId}" - name: "reserved1" pattern: "%X{spanId}" -# - name: "config_snapshot" -# pattern: "%X{configSnapshot}" + + # 파일 Appenders - Promtail이 이 파일들을 읽음 File: - name: file-dev-appender fileName: ${dev-log} PatternLayout: pattern: ${file-layout-pattern} + # 로그 로테이션 설정 (선택사항) + # Policies: + # SizeBasedTriggeringPolicy: + # size: 10MB + # DefaultRolloverStrategy: + # max: 10 + - name: file-error-appender fileName: ${error-log} PatternLayout: @@ -102,20 +80,19 @@ Configuration: level: ERROR Loggers: - # Root 로거 - 개발환경에서는 기본적으로 INFO 레벨 + # Root 로거 Root: level: INFO AppenderRef: - ref: console-appender Logger: - # 애플리케이션 로그 - 개발 시 모든 레벨 + Loki 전송 + # 애플리케이션 로그 - 파일로만 저장 (Promtail이 읽어감) - name: site.icebang additivity: "false" level: DEBUG AppenderRef: - ref: console-appender - - ref: loki-appender - ref: file-dev-appender - ref: file-error-appender @@ -124,12 +101,11 @@ Configuration: additivity: "false" AppenderRef: - ref: workflow-appender - - ref: loki-appender - ref: console-appender - ref: file-dev-appender - ref: file-error-appender - # Spring Framework - 개발 시 필요한 정보만 + # Spring Framework - name: org.springframework additivity: "false" level: INFO @@ -137,45 +113,42 @@ Configuration: - ref: 
console-appender - ref: file-dev-appender - # Spring Security - 인증 디버깅용 + # Spring Security - name: org.springframework.security level: DEBUG additivity: "false" AppenderRef: - ref: console-appender - ref: file-dev-appender - - ref: loki-appender - # 웹 요청 로그 - API 개발 시 유용 + # 웹 요청 로그 - name: org.springframework.web level: DEBUG additivity: "false" AppenderRef: - ref: console-appender - ref: file-dev-appender - - ref: loki-appender - # 트랜잭션 로그 - DB 작업 디버깅 + # 트랜잭션 로그 - name: org.springframework.transaction level: DEBUG additivity: "false" AppenderRef: - ref: console-appender - ref: file-dev-appender - - ref: loki-appender # HikariCP 로그 비활성화 - name: com.zaxxer.hikari level: "OFF" - # SQL 로그 - 개발 시 쿼리 확인용 (필요시 활성화) + # SQL 로그 - name: org.hibernate.SQL level: DEBUG additivity: "false" AppenderRef: - ref: console-appender - # 파라미터 바인딩 로그 (필요시 활성화) + # 파라미터 바인딩 로그 - name: org.hibernate.type.descriptor.sql.BasicBinder level: TRACE additivity: "false" diff --git a/apps/user-service/src/main/resources/log4j2-production.yml b/apps/user-service/src/main/resources/log4j2-production.yml index 455b2f22..2e88bd19 100644 --- a/apps/user-service/src/main/resources/log4j2-production.yml +++ b/apps/user-service/src/main/resources/log4j2-production.yml @@ -12,10 +12,10 @@ Configuration: value: "UTF-8" # 프로덕션 환경용 콘솔 패턴 - 구조화된 로그 - name: "console-layout-pattern" - value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %logger{36} - %msg%n" - # 파일용 패턴 + value: "%highlight{[%-5level]} [%X{traceId}] [%X{spanId}] %d{HH:mm:ss}{UTC} [%t] %logger{20} - %msg% " + # 파일용 패턴 - Promtail이 파싱하기 쉽게 구조화 (UTC 시간 사용) - name: "file-layout-pattern" - value: "[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n" + value: "[%X{traceId}] [%X{spanId}] %d{yyyy-MM-dd HH:mm:ss.SSS}{UTC} [%t] %-5level %logger{36} - %msg%n" # 프로덕션 환경용 로그 파일들 - name: "prod-log" value: ${log-path}/production/app.log @@ -33,8 +33,9 @@ Configuration: # Loki Appender - 프로덕션 
모니터링용 Loki: name: loki-appender - host: localhost - port: 3100 + url: ${LOKI_URL} # Grafana Cloud Loki URL + basicAuthUsername: ${LOKI_USERNAME} # Grafana Cloud 사용자 이름 + basicAuthPassword: ${LOKI_PASSWORD} # Grafana Cloud API Key JsonLayout: compact: true eventEol: true diff --git a/docker/local/docker-compose.yml b/docker/local/docker-compose.yml index 146f0534..6e27be91 100644 --- a/docker/local/docker-compose.yml +++ b/docker/local/docker-compose.yml @@ -50,6 +50,31 @@ services: networks: - icebang-network + promtail: + image: grafana/promtail:2.9.0 + container_name: promtail + restart: unless-stopped + ports: + - "9080:9080" + volumes: + - ./promtail-config.yml:/etc/promtail/config.yml:ro # config 파일 + - ./logs:/logs:cached + - promtail_positions:/var/lib/promtail # positions 파일용 writable volume + command: + - -config.file=/etc/promtail/config.yml + - -config.expand-env=true + ulimits: + nofile: + soft: 65535 + hard: 65535 + depends_on: + - loki + healthcheck: + test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:9080/ready || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + grafana: image: grafana/grafana:10.1.0 container_name: grafana @@ -74,6 +99,7 @@ volumes: mariadb_data: loki_data: grafana_data: + promtail_positions: {} # Promtail positions 파일용 named volume networks: icebang-network: diff --git a/docker/local/promtail-config.yml b/docker/local/promtail-config.yml new file mode 100644 index 00000000..a55c64db --- /dev/null +++ b/docker/local/promtail-config.yml @@ -0,0 +1,49 @@ +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: http://localhost:3100/loki/api/v1/push + +scrape_configs: + - job_name: user-service-logs + static_configs: + - targets: + - localhost + labels: + job: user-service + app: user-service + env: develop + __path__: /logs/develop/app.log + pipeline_stages: + - regex: + expression: '^\[(?P[^\]]*)\] \[(?P[^\]]*)\] (?P\d{4}-\d{2}-\d{2} 
\d{2}:\d{2}:\d{2}\.\d{3}) \[(?P[^\]]+)\] (?P\w+)\s+(?P\S+) - (?P.*)$' + - labels: + traceId: + level: + thread: + logger: + spanId: + + - job_name: user-service-errors + static_configs: + - targets: + - localhost + labels: + job: user-service-errors + app: user-service + env: develop + log_type: error + __path__: /logs/develop/error.log + pipeline_stages: + - regex: + expression: '^\[(?P[^\]]*)\] \[(?P[^\]]*)\] (?P\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P[^\]]+)\] (?P\w+)\s+(?P\S+) - (?P.*)$' + - labels: + traceId: + level: + thread: + logger: + spanId: \ No newline at end of file diff --git a/docker/production/docker-compose.yml b/docker/production/docker-compose.yml index f3016096..deff1ca4 100644 --- a/docker/production/docker-compose.yml +++ b/docker/production/docker-compose.yml @@ -19,6 +19,8 @@ services: image: ghcr.io/kernel180-be12/final-4team-icebang/user-service:latest container_name: user-service restart: on-failure:3 + depends_on: + - promtail ports: - "8080:8080" networks: @@ -27,6 +29,25 @@ services: - .env.prod environment: - SPRING_PROFILES_ACTIVE=production + volumes: + - logs_volume:/logs + + promtail: + image: grafana/promtail:2.9.0 + container_name: promtail + restart: unless-stopped + volumes: + - ./promtail-config.yml:/etc/promtail/config.yml:ro + - logs_volume:/logs # Spring 로그 읽기 + command: + - -config.file=/etc/promtail/config.yml + - -config.expand-env=true + ulimits: + nofile: + soft: 65535 + hard: 65535 + env_file: + - .env.prod pre-processing-service: image: ghcr.io/kernel180-be12/final-4team-icebang/pre-processing-service:latest @@ -39,11 +60,13 @@ services: env_file: - .env.prod volumes: - - onnx_models:/app/models # ONNX 모델 저장용 볼륨 -> 서버에 만들어야함 + - onnx_models:/app/models # ONNX 모델 저장용 볼륨 volumes: caddy_data: caddy_config: + logs_volume: + driver: local onnx_models: driver: local diff --git a/docker/production/promtail-config.yml b/docker/production/promtail-config.yml new file mode 100644 index 00000000..30beb73a --- 
/dev/null +++ b/docker/production/promtail-config.yml @@ -0,0 +1,49 @@ +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: https://${LOKI_USERNAME}:${LOKI_PASSWORD}@${LOKI_HOST}/loki/api/v1/push + +scrape_configs: + - job_name: user-service-logs + static_configs: + - targets: + - localhost + labels: + job: user-service + app: user-service + env: production + __path__: /logs/production/app.log + pipeline_stages: + - regex: + expression: '^\[(?P<traceId>[^\]]*)\] \[(?P<spanId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P<thread>[^\]]+)\] (?P<level>\w+)\s+(?P<logger>\S+) - (?P<message>.*)$' + - labels: + traceId: + level: + thread: + logger: + spanId: + + - job_name: user-service-errors + static_configs: + - targets: + - localhost + labels: + job: user-service-errors + app: user-service + env: production + log_type: error + __path__: /logs/production/error.log + pipeline_stages: + - regex: + expression: '^\[(?P<traceId>[^\]]*)\] \[(?P<spanId>[^\]]*)\] (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \[(?P<thread>[^\]]+)\] (?P<level>\w+)\s+(?P<logger>\S+) - (?P<message>.*)$' + - labels: + traceId: + level: + thread: + logger: + spanId: \ No newline at end of file From ef9584a4866120ec5e7cd8c845f14851b975691d Mon Sep 17 00:00:00 2001 From: kakusiA Date: Wed, 17 Sep 2025 16:31:49 +0900 Subject: [PATCH 53/57] =?UTF-8?q?refactor:=20=5Fdefault=5Fmappings=20url?= =?UTF-8?q?=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/middleware/ServiceLoggerMiddleware.py | 81 +++++++++++++++++-- 1 file changed, 75 insertions(+), 6 deletions(-) diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py index 03dd986b..03142eb0 100644 --- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py +++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py @@ -47,9 +47,11 @@ def __init__(self, app, 
service_mappings: Dict[str, Dict] = None, enable_rds: bo def _default_mappings(self) -> Dict[str, Dict]: """기본 서비스 매핑 설정""" return { + # 네이버 키워드 검색 "/keywords/search": { "service_type": "NAVER_CRAWLING", "track_params": [ + "tag", "keyword", "category", "startDate", @@ -57,27 +59,94 @@ def _default_mappings(self) -> Dict[str, Dict]: "job_id", "schedule_id", ], - "response_trackers": ["keyword", "total_keywords", "results_count"], + "response_trackers": ["keyword", "total_keyword", "success", "status"], + }, + # 블로그 RAG 콘텐츠 생성 + "/blogs/rag/create": { + "service_type": "BLOG_RAG_CREATE", + "track_params": [ + "keyword", + "product_info", + "content_type", + "target_length", + "job_id", + "schedule_id", + "schedule_his_id", + ], + "response_trackers": ["title", "content_length", "tags_count", "success", "status"], }, + # 블로그 배포 "/blogs/publish": { "service_type": "BLOG_PUBLISH", "track_params": [ "tag", - "title", - "content", - "tags", + "blog_id", + "post_title", + "post_content", + "post_tags", "job_id", "schedule_id", "schedule_his_id", ], "response_trackers": [ + "tag", + "post_title", + "post_url", + "published_at", + "publish_success", + "metadata", + "success", + "status", + ], + }, + # 상품 검색 + "/products/search": { + "service_type": "PRODUCT_SEARCH", + "track_params": [ + "keyword", + "job_id", + "schedule_id", + "schedule_his_id", + ], + "response_trackers": ["keyword", "search_results_count", "success", "status"], + }, + # 상품 매칭 + "/products/match": { + "service_type": "PRODUCT_MATCH", + "track_params": [ + "keyword", + "search_results", "job_id", "schedule_id", "schedule_his_id", - "status", - "metadata", ], + "response_trackers": ["keyword", "matched_products_count", "success", "status"], }, + # 상품 유사도 분석 + "/products/similarity": { + "service_type": "PRODUCT_SIMILARITY", + "track_params": [ + "keyword", + "matched_products", + "search_results", + "job_id", + "schedule_id", + "schedule_his_id", + ], + "response_trackers": ["keyword", "selected_product", 
"reason", "success", "status"], + }, + # 상품 크롤링 + "/products/crawl": { + "service_type": "PRODUCT_CRAWL", + "track_params": [ + "tag", + "product_url", + "job_id", + "schedule_id", + "schedule_his_id", + ], + "response_trackers": ["tag", "product_url", "product_detail", "crawled_at", "success", "status"], + } } async def dispatch(self, request: Request, call_next): From 8ca0225d5fb6af126a2377f2324ad875a9efdd55 Mon Sep 17 00:00:00 2001 From: can019 Date: Wed, 17 Sep 2025 16:41:09 +0900 Subject: [PATCH 54/57] =?UTF-8?q?fix:=20E2e=20=EC=88=98=ED=96=89=20?= =?UTF-8?q?=EC=A0=84=20truncate=EB=A5=BC=20=EA=B0=81=20=ED=85=8C=EC=8A=A4?= =?UTF-8?q?=ED=8A=B8=20=EC=8B=9C=20=EC=88=98=ED=96=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java | 2 +- .../site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java | 2 +- .../java/site/icebang/e2e/setup/support/E2eTestSupport.java | 2 -- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java index 8fea2764..03c5f899 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java @@ -15,7 +15,7 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = "classpath:sql/01-insert-internal-users.sql", + value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("사용자 로그아웃 플로우 E2E 테스트") @E2eTest diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java index 1cf10e95..df66a7c6 100644 --- 
a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -15,7 +15,7 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = "classpath:sql/01-insert-internal-users.sql", + value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("사용자 등록 플로우 E2E 테스트") class UserRegistrationFlowE2eTest extends E2eTestSupport { diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java index 56a1259f..c2d10870 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java @@ -5,7 +5,6 @@ import org.springframework.boot.test.web.client.TestRestTemplate; import org.springframework.boot.test.web.server.LocalServerPort; import org.springframework.context.annotation.Import; -import org.springframework.test.context.jdbc.Sql; import org.springframework.test.web.servlet.MockMvc; import org.springframework.web.context.WebApplicationContext; import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; @@ -16,7 +15,6 @@ @Import(E2eTestConfiguration.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @E2eTest -@Sql(value = "classpath:sql/00-truncate.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) public abstract class E2eTestSupport { @Autowired protected TestRestTemplate restTemplate; From ed728a5e4612a19b4f88b60f1bced43c1cf53222 Mon Sep 17 00:00:00 2001 From: kakusiA Date: Wed, 17 Sep 2025 18:48:27 +0900 Subject: [PATCH 55/57] =?UTF-8?q?refactor:=20-=20log=20pre-processing-app.?= 
=?UTF-8?q?log=EC=97=90=20=EB=A1=9C=EA=B7=B8=EA=B0=80=20=EC=8C=93=EC=9D=B4?= =?UTF-8?q?=EB=8F=84=EB=A1=9D=20=EC=BD=94=EB=93=9C=20=EC=88=98=EC=A0=95?= =?UTF-8?q?=EB=B0=8F=20=EA=B0=9C=EB=B0=9C,=20=EA=B8=B0=EC=A1=B4=20loki?= =?UTF-8?q?=EC=BD=94=EB=93=9C=20=EC=88=98=EC=A0=95=EC=A4=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/core/logging_config.py | 89 +++++++++++++++++++ apps/pre-processing-service/app/main.py | 4 + .../app/middleware/ServiceLoggerMiddleware.py | 8 +- .../app/middleware/logging.py | 12 +++ .../app/model/execution_log.py | 2 +- .../app/service/keyword_service.py | 14 +-- 6 files changed, 120 insertions(+), 9 deletions(-) create mode 100644 apps/pre-processing-service/app/core/logging_config.py diff --git a/apps/pre-processing-service/app/core/logging_config.py b/apps/pre-processing-service/app/core/logging_config.py new file mode 100644 index 00000000..2c30485c --- /dev/null +++ b/apps/pre-processing-service/app/core/logging_config.py @@ -0,0 +1,89 @@ +import os +from loguru import logger +import sys +from contextvars import ContextVar + +# trace_id context 변수 import +try: + from app.middleware.ServiceLoggerMiddleware import trace_id_context +except ImportError: + # 모듈이 아직 로드되지 않은 경우를 위한 기본값 + trace_id_context: ContextVar[str] = ContextVar("trace_id", default="") + + +def setup_file_logging(): + """ + PromTail을 통해 Loki로 전송하기 위한 파일 로깅 설정 + """ + # 기존 loguru 핸들러 제거 (기본 콘솔 출력 제거) + logger.remove() + + # 로그 파일 경로 설정 + log_file_path = "pre-processing-app.log" + + # trace_id를 포함한 간단한 포맷 문자열 사용 + def add_trace_id_filter(record): + try: + current_trace_id = trace_id_context.get() + if current_trace_id: + record["extra"]["trace_id"] = current_trace_id + else: + record["extra"]["trace_id"] = "" + except LookupError: + record["extra"]["trace_id"] = "" + return record + + # 파일 로깅에서 LoggingMiddleware 제외하는 필터 + def exclude_logging_middleware_filter(record): + # LoggingMiddleware의 로그는 파일에 기록하지 않음 + if 
record["name"] == "app.middleware.logging": + return False + return add_trace_id_filter(record) + + # 파일 로깅 핸들러 추가 - trace_id 포함, LoggingMiddleware 제외 + logger.add( + log_file_path, + format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {name}:{function}:{line} | {message}", + level="DEBUG", + rotation="100 MB", # 100MB마다 로테이션 + retention="7 days", # 7일간 보관 + compression="zip", # 압축 + enqueue=True, # 멀티프로세스 안전 + serialize=False, # JSON 직렬화 비활성화 (PromTail에서 파싱) + backtrace=True, # 백트레이스 포함 + diagnose=True, # 진단 정보 포함 + filter=exclude_logging_middleware_filter + ) + + # 에러 레벨 이상은 별도 파일에도 기록 - trace_id 포함, LoggingMiddleware 제외 + logger.add( + "pre-processing-app-error.log", + format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {name}:{function}:{line} | {message}", + level="ERROR", + rotation="50 MB", + retention="30 days", + compression="zip", + enqueue=True, + serialize=False, + backtrace=True, + diagnose=True, + filter=exclude_logging_middleware_filter + ) + + # 개발 환경에서는 콘솔 출력도 유지 + if os.getenv("ENVIRONMENT", "development") == "development": + logger.add( + sys.stdout, + format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{function}:{line} | {message}", + level="DEBUG", + colorize=False, # colorize 비활성화하여 태그 충돌 방지 + filter=add_trace_id_filter + ) + + logger.info("File logging setup completed for PromTail integration") + return logger + + +def get_logger(): + """구성된 로거 인스턴스 반환""" + return logger \ No newline at end of file diff --git a/apps/pre-processing-service/app/main.py b/apps/pre-processing-service/app/main.py index 9865d845..0cc19d26 100644 --- a/apps/pre-processing-service/app/main.py +++ b/apps/pre-processing-service/app/main.py @@ -5,6 +5,10 @@ from fastapi.exceptions import RequestValidationError from app.middleware.ServiceLoggerMiddleware import ServiceLoggerMiddleware +# 파일 로깅 설정 초기화 +from app.core.logging_config import setup_file_logging +setup_file_logging() + # --- 애플리케이션 구성 요소 임포트 --- 
from app.api.router import api_router from app.middleware.logging import LoggingMiddleware diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py index 03142eb0..4d7e778f 100644 --- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py +++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py @@ -13,7 +13,7 @@ from app.middleware.rds_logger import RDSLogger from app.middleware.loki_logger import LokiLogger -trace_id_context: ContextVar[str] = ContextVar("trace_id", default="NO_TRACE_ID") +trace_id_context: ContextVar[str] = ContextVar("trace_id", default="") class ServiceLoggerMiddleware(BaseHTTPMiddleware): @@ -42,7 +42,9 @@ def __init__(self, app, service_mappings: Dict[str, Dict] = None, enable_rds: bo # 로거 인스턴스 초기화 self.rds_logger = RDSLogger() if enable_rds else None - self.loki_logger = LokiLogger() if enable_loki else None + # Loki 직접 로깅 비활성화 - PromTail을 통해 파일로 로깅 + # self.loki_logger = LokiLogger() if enable_loki else None + self.loki_logger = None def _default_mappings(self) -> Dict[str, Dict]: """기본 서비스 매핑 설정""" @@ -159,7 +161,7 @@ async def dispatch(self, request: Request, call_next): return await call_next(request) # 2. 
시작 로깅 - trace_id = request.headers.get("X-Request-ID", "NO_TRACE_ID") + trace_id = request.headers.get("X-Request-ID", "") trace_id_context.set(trace_id) start_time = time.time() diff --git a/apps/pre-processing-service/app/middleware/logging.py b/apps/pre-processing-service/app/middleware/logging.py index 9a8cb6a0..da80b924 100644 --- a/apps/pre-processing-service/app/middleware/logging.py +++ b/apps/pre-processing-service/app/middleware/logging.py @@ -3,11 +3,23 @@ from loguru import logger from starlette.middleware.base import BaseHTTPMiddleware +# trace_id context 변수 import +try: + from app.middleware.ServiceLoggerMiddleware import trace_id_context +except ImportError: + from contextvars import ContextVar + trace_id_context: ContextVar[str] = ContextVar("trace_id", default="") + class LoggingMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): start_time = time.time() + # trace_id 설정 (X-Request-ID 헤더에서) + current_trace_id = request.headers.get("X-Request-ID", "") + if current_trace_id: + trace_id_context.set(current_trace_id) + # 1. 
요청 시작 로그 logger.info( "요청 시작: IP='{}' 메서드='{}' URL='{}'", diff --git a/apps/pre-processing-service/app/model/execution_log.py b/apps/pre-processing-service/app/model/execution_log.py index 069ab392..b8358d51 100644 --- a/apps/pre-processing-service/app/model/execution_log.py +++ b/apps/pre-processing-service/app/model/execution_log.py @@ -53,7 +53,7 @@ def to_loki_format(self, app_name: str = "pre-processing-service") -> Dict[str, labels = { "app": app_name, "env": "develop", - "traceId": self.trace_id or "NO_TRACE_ID", + "traceId": self.trace_id or "", "spanId": self.span_id, # 필요시 추가 "executionType": self.execution_type, "sourceId": str(self.source_id), diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py index 01d61f37..0028fd6c 100644 --- a/apps/pre-processing-service/app/service/keyword_service.py +++ b/apps/pre-processing-service/app/service/keyword_service.py @@ -2,7 +2,7 @@ import random from app.utils.response import Response import httpx - +from loguru import logger from ..errors.CustomException import InvalidItemDataException from ..model.schemas import RequestNaverSearch from datetime import date, timedelta @@ -13,7 +13,7 @@ async def keyword_search(request: RequestNaverSearch) -> dict: 네이버 검색 요청을 처리하는 비즈니스 로직입니다. 입력받은 데이터를 기반으로 응답 데이터를 생성하여 딕셔너리로 반환합니다. 
""" - + logger.info(f"검색 플랫폼: {request.tag}") # 키워드 검색 if request.tag == "naver": trending_keywords = await search_naver_rank() @@ -25,8 +25,10 @@ async def keyword_search(request: RequestNaverSearch) -> dict: if not trending_keywords: raise InvalidItemDataException() + keyword = random.choice(list(trending_keywords.values())) + logger.info(f"선택된 키워드: {keyword}, 검색된 키워드 수: {len(trending_keywords)}") data = { - "keyword": random.choice(list(trending_keywords.values())), + "keyword": keyword, "total_keyword": trending_keywords, } return Response.ok(data) @@ -55,11 +57,13 @@ async def search_naver_rank() -> dict[int, str]: "50000009", ] category = random.choice(categorys) + logger.info(f"선택된 카테고리:{category}") today = date.today() yesterday = today - timedelta(days=1) # 3. 원하는 포맷(YYYY-MM-DD)으로 변환하기 end_date = today.strftime("%Y-%m-%d") + logger.info(f"실행 날짜: {end_date}") start_date = yesterday.strftime("%Y-%m-%d") keywords_dic = {} @@ -86,7 +90,7 @@ async def search_naver_rank() -> dict[int, str]: httpx.RequestError, json.JSONDecodeError, ) as e: - print(f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}") + logger.error(f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}") raise InvalidItemDataException return keywords_dic @@ -112,5 +116,5 @@ async def search_naver_store() -> dict[int, str]: return keyword_dict except (httpx.HTTPStatusError, httpx.RequestError, json.JSONDecodeError) as e: - print(f"네이버 스토어에서 데이터를 가져오는 데 실패했습니다: {e}") + logger.error(f"네이버 스토어에서 데이터를 가져오는 데 실패했습니다: {e}") raise InvalidItemDataException from e From 2c7d4a5eb7224902a6b8ad9e59f9831e42d4d6c3 Mon Sep 17 00:00:00 2001 From: kakusia Date: Thu, 18 Sep 2025 00:15:20 +0900 Subject: [PATCH 56/57] =?UTF-8?q?feat:=20-=20Fastapi=20promtail=20local=20?= =?UTF-8?q?=EA=B0=9C=EB=B0=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/core/logging_config.py | 11 +++++- apps/pre-processing-service/poetry.lock | 22 +++++------ docker/local/promtail-config.yml | 37 ++++++++++++++++++- 
3 files changed, 56 insertions(+), 14 deletions(-) diff --git a/apps/pre-processing-service/app/core/logging_config.py b/apps/pre-processing-service/app/core/logging_config.py index 2c30485c..c86542cf 100644 --- a/apps/pre-processing-service/app/core/logging_config.py +++ b/apps/pre-processing-service/app/core/logging_config.py @@ -18,8 +18,15 @@ def setup_file_logging(): # 기존 loguru 핸들러 제거 (기본 콘솔 출력 제거) logger.remove() + + # 환경변수로 로그 디렉토리 설정 (기본값: logs/develop) + log_dir = "../../docker/local/logs/develop" + + # 로그 디렉토리가 없으면 생성 + # 로그 파일 경로 설정 - log_file_path = "pre-processing-app.log" + log_file_path = log_dir + "/pre-processing-app.log" + error_log_file_path = log_dir + "/pre-processing-app-error.log" # trace_id를 포함한 간단한 포맷 문자열 사용 def add_trace_id_filter(record): @@ -57,7 +64,7 @@ def exclude_logging_middleware_filter(record): # 에러 레벨 이상은 별도 파일에도 기록 - trace_id 포함, LoggingMiddleware 제외 logger.add( - "pre-processing-app-error.log", + error_log_file_path, format="[{extra[trace_id]}] {time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {name}:{function}:{line} | {message}", level="ERROR", rotation="50 MB", diff --git a/apps/pre-processing-service/poetry.lock b/apps/pre-processing-service/poetry.lock index 04b147b0..a1729b8b 100644 --- a/apps/pre-processing-service/poetry.lock +++ b/apps/pre-processing-service/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -623,19 +623,19 @@ files = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.116.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565"}, - {file = "fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143"}, + {file = "fastapi-0.116.2-py3-none-any.whl", hash = "sha256:c3a7a8fb830b05f7e087d920e0d786ca1fc9892eb4e9a84b227be4c1bc7569db"}, + {file = "fastapi-0.116.2.tar.gz", hash = "sha256:231a6af2fe21cfa2c32730170ad8514985fc250bec16c9b242d3b94c835ef529"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.40.0,<0.48.0" +starlette = ">=0.40.0,<0.49.0" typing-extensions = ">=4.8.0" [package.extras] @@ -866,14 +866,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] [[package]] name = "google-api-python-client" -version = "2.181.0" +version = "2.182.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google_api_python_client-2.181.0-py3-none-any.whl", hash = "sha256:348730e3ece46434a01415f3d516d7a0885c8e624ce799f50f2d4d86c2475fb7"}, - {file = "google_api_python_client-2.181.0.tar.gz", hash = "sha256:d7060962a274a16a2c6f8fb4b1569324dbff11bfbca8eb050b88ead1dd32261c"}, + {file = "google_api_python_client-2.182.0-py3-none-any.whl", hash = "sha256:a9b071036d41a17991d8fbf27bedb61f2888a39ae5696cb5a326bf999b2d5209"}, + {file = "google_api_python_client-2.182.0.tar.gz", hash = "sha256:cb2aa127e33c3a31e89a06f39cf9de982db90a98dee020911b21013afafad35f"}, ] [package.dependencies] @@ -2890,14 +2890,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name 
= "starlette" -version = "0.47.3" +version = "0.48.0" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"}, - {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"}, + {file = "starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659"}, + {file = "starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46"}, ] [package.dependencies] diff --git a/docker/local/promtail-config.yml b/docker/local/promtail-config.yml index a55c64db..687e1251 100644 --- a/docker/local/promtail-config.yml +++ b/docker/local/promtail-config.yml @@ -46,4 +46,39 @@ scrape_configs: level: thread: logger: - spanId: \ No newline at end of file + spanId: + + - job_name: pre-processing-logs + static_configs: + - targets: + - localhost + labels: + job: pre-processing + app: pre-processing + env: develop + __path__: /logs/develop/pre-processing-app.log + pipeline_stages: + - regex: + expression: '^\[(?P[^\]]*)\] (?P\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \| (?P\w+) \| (?P[^:]+:[^:]+:\d+) \| (?P.*)$' + - labels: + traceId: + level: + logger: + + - job_name: pre-processing-errors + static_configs: + - targets: + - localhost + labels: + job: pre-processing-errors + app: pre-processing + env: develop + log_type: error + __path__: /logs/develop/pre-processing-app-error.log + pipeline_stages: + - regex: + expression: '^\[(?P[^\]]*)\] (?P\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}) \| (?P\w+) \| (?P[^:]+:[^:]+:\d+) \| (?P.*)$' + - labels: + traceId: + level: + logger: From 3d173c85519dca321a5ec9860d3d7257588a3847 Mon Sep 17 00:00:00 2001 From: kakusiA Date: Thu, 18 Sep 2025 10:53:29 +0900 Subject: [PATCH 57/57] 
=?UTF-8?q?style:=20FAST=20API=20=EC=BD=94=EB=93=9C?= =?UTF-8?q?=20=ED=8F=AC=EB=A7=B7=ED=8C=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/core/logging_config.py | 19 ++- apps/pre-processing-service/app/main.py | 1 + .../app/middleware/ServiceLoggerMiddleware.py | 121 +++++++++++++++--- .../app/middleware/logging.py | 1 + .../app/middleware/loki_logger.py | 28 ++-- .../app/middleware/rds_logger.py | 22 ++-- .../app/model/execution_log.py | 45 ++++--- .../app/service/keyword_service.py | 4 +- 8 files changed, 164 insertions(+), 77 deletions(-) diff --git a/apps/pre-processing-service/app/core/logging_config.py b/apps/pre-processing-service/app/core/logging_config.py index c86542cf..b2c10d88 100644 --- a/apps/pre-processing-service/app/core/logging_config.py +++ b/apps/pre-processing-service/app/core/logging_config.py @@ -18,7 +18,6 @@ def setup_file_logging(): # 기존 loguru 핸들러 제거 (기본 콘솔 출력 제거) logger.remove() - # 환경변수로 로그 디렉토리 설정 (기본값: logs/develop) log_dir = "../../docker/local/logs/develop" @@ -54,12 +53,12 @@ def exclude_logging_middleware_filter(record): level="DEBUG", rotation="100 MB", # 100MB마다 로테이션 retention="7 days", # 7일간 보관 - compression="zip", # 압축 - enqueue=True, # 멀티프로세스 안전 - serialize=False, # JSON 직렬화 비활성화 (PromTail에서 파싱) - backtrace=True, # 백트레이스 포함 - diagnose=True, # 진단 정보 포함 - filter=exclude_logging_middleware_filter + compression="zip", # 압축 + enqueue=True, # 멀티프로세스 안전 + serialize=False, # JSON 직렬화 비활성화 (PromTail에서 파싱) + backtrace=True, # 백트레이스 포함 + diagnose=True, # 진단 정보 포함 + filter=exclude_logging_middleware_filter, ) # 에러 레벨 이상은 별도 파일에도 기록 - trace_id 포함, LoggingMiddleware 제외 @@ -74,7 +73,7 @@ def exclude_logging_middleware_filter(record): serialize=False, backtrace=True, diagnose=True, - filter=exclude_logging_middleware_filter + filter=exclude_logging_middleware_filter, ) # 개발 환경에서는 콘솔 출력도 유지 @@ -84,7 +83,7 @@ def exclude_logging_middleware_filter(record): format="[{extra[trace_id]}] 
{time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{function}:{line} | {message}", level="DEBUG", colorize=False, # colorize 비활성화하여 태그 충돌 방지 - filter=add_trace_id_filter + filter=add_trace_id_filter, ) logger.info("File logging setup completed for PromTail integration") @@ -93,4 +92,4 @@ def exclude_logging_middleware_filter(record): def get_logger(): """구성된 로거 인스턴스 반환""" - return logger \ No newline at end of file + return logger diff --git a/apps/pre-processing-service/app/main.py b/apps/pre-processing-service/app/main.py index 0cc19d26..4bbf3ff1 100644 --- a/apps/pre-processing-service/app/main.py +++ b/apps/pre-processing-service/app/main.py @@ -7,6 +7,7 @@ # 파일 로깅 설정 초기화 from app.core.logging_config import setup_file_logging + setup_file_logging() # --- 애플리케이션 구성 요소 임포트 --- diff --git a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py index 4d7e778f..30d3475b 100644 --- a/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py +++ b/apps/pre-processing-service/app/middleware/ServiceLoggerMiddleware.py @@ -22,7 +22,13 @@ class ServiceLoggerMiddleware(BaseHTTPMiddleware): URL 패턴을 기반으로 자동으로 서비스 타입 식별 및 로깅 """ - def __init__(self, app, service_mappings: Dict[str, Dict] = None, enable_rds: bool = True, enable_loki: bool = True): + def __init__( + self, + app, + service_mappings: Dict[str, Dict] = None, + enable_rds: bool = True, + enable_loki: bool = True, + ): """ :param service_mappings: URL 패턴별 서비스 설정 :param enable_rds: RDS 로깅 활성화 여부 @@ -75,7 +81,13 @@ def _default_mappings(self) -> Dict[str, Dict]: "schedule_id", "schedule_his_id", ], - "response_trackers": ["title", "content_length", "tags_count", "success", "status"], + "response_trackers": [ + "title", + "content_length", + "tags_count", + "success", + "status", + ], }, # 블로그 배포 "/blogs/publish": { @@ -110,7 +122,12 @@ def _default_mappings(self) -> Dict[str, Dict]: "schedule_id", "schedule_his_id", ], - 
"response_trackers": ["keyword", "search_results_count", "success", "status"], + "response_trackers": [ + "keyword", + "search_results_count", + "success", + "status", + ], }, # 상품 매칭 "/products/match": { @@ -122,7 +139,12 @@ def _default_mappings(self) -> Dict[str, Dict]: "schedule_id", "schedule_his_id", ], - "response_trackers": ["keyword", "matched_products_count", "success", "status"], + "response_trackers": [ + "keyword", + "matched_products_count", + "success", + "status", + ], }, # 상품 유사도 분석 "/products/similarity": { @@ -135,7 +157,13 @@ def _default_mappings(self) -> Dict[str, Dict]: "schedule_id", "schedule_his_id", ], - "response_trackers": ["keyword", "selected_product", "reason", "success", "status"], + "response_trackers": [ + "keyword", + "selected_product", + "reason", + "success", + "status", + ], }, # 상품 크롤링 "/products/crawl": { @@ -147,8 +175,15 @@ def _default_mappings(self) -> Dict[str, Dict]: "schedule_id", "schedule_his_id", ], - "response_trackers": ["tag", "product_url", "product_detail", "crawled_at", "success", "status"], - } + "response_trackers": [ + "tag", + "product_url", + "product_detail", + "crawled_at", + "success", + "status", + ], + }, } async def dispatch(self, request: Request, call_next): @@ -205,8 +240,14 @@ async def dispatch(self, request: Request, call_next): # 외부 로깅 시스템에 성공 로그 전송 success_message = f"[{service_type}_SUCCESS]{param_str} status_code={response.status_code}" await self._log_to_external_systems( - "success", service_type, source_id, trace_id, success_message, - run_id, params, duration_ms=duration_ms + "success", + service_type, + source_id, + trace_id, + success_message, + run_id, + params, + duration_ms=duration_ms, ) else: await self._log_error_response( @@ -216,8 +257,15 @@ async def dispatch(self, request: Request, call_next): # 외부 로깅 시스템에 에러 로그 전송 error_message = f"[{service_type}_ERROR]{param_str} status_code={response.status_code}" await self._log_to_external_systems( - "error", service_type, 
source_id, trace_id, error_message, - run_id, params, duration_ms=duration_ms, error_code=f"HTTP_{response.status_code}" + "error", + service_type, + source_id, + trace_id, + error_message, + run_id, + params, + duration_ms=duration_ms, + error_code=f"HTTP_{response.status_code}", ) return response @@ -228,10 +276,19 @@ async def dispatch(self, request: Request, call_next): await self._log_exception(service_type, trace_id, start_time, param_str, e) # 외부 로깅 시스템에 예외 로그 전송 - exception_message = f"[{service_type}_EXCEPTION]{param_str} exception={str(e)}" + exception_message = ( + f"[{service_type}_EXCEPTION]{param_str} exception={str(e)}" + ) await self._log_to_external_systems( - "error", service_type, source_id, trace_id, exception_message, - run_id, params, duration_ms=duration_ms, error_code="EXCEPTION" + "error", + service_type, + source_id, + trace_id, + exception_message, + run_id, + params, + duration_ms=duration_ms, + error_code="EXCEPTION", ) raise @@ -387,7 +444,7 @@ async def _log_to_external_systems( run_id: Optional[int] = None, params: Optional[Dict[str, Any]] = None, duration_ms: Optional[int] = None, - error_code: Optional[str] = None + error_code: Optional[str] = None, ): """RDS와 Loki에 로그 전송""" tasks = [] @@ -402,11 +459,24 @@ async def _log_to_external_systems( ) elif log_type == "success": task = self.rds_logger.log_success( - service_type, source_id, trace_id, message, duration_ms, run_id, additional_data + service_type, + source_id, + trace_id, + message, + duration_ms, + run_id, + additional_data, ) elif log_type == "error": task = self.rds_logger.log_error( - service_type, source_id, trace_id, message, error_code, duration_ms, run_id, additional_data + service_type, + source_id, + trace_id, + message, + error_code, + duration_ms, + run_id, + additional_data, ) tasks.append(task) @@ -417,11 +487,24 @@ async def _log_to_external_systems( ) elif log_type == "success": task = self.loki_logger.log_success( - service_type, source_id, trace_id, 
message, duration_ms, run_id, additional_data + service_type, + source_id, + trace_id, + message, + duration_ms, + run_id, + additional_data, ) elif log_type == "error": task = self.loki_logger.log_error( - service_type, source_id, trace_id, message, error_code, duration_ms, run_id, additional_data + service_type, + source_id, + trace_id, + message, + error_code, + duration_ms, + run_id, + additional_data, ) tasks.append(task) diff --git a/apps/pre-processing-service/app/middleware/logging.py b/apps/pre-processing-service/app/middleware/logging.py index da80b924..15bfd757 100644 --- a/apps/pre-processing-service/app/middleware/logging.py +++ b/apps/pre-processing-service/app/middleware/logging.py @@ -8,6 +8,7 @@ from app.middleware.ServiceLoggerMiddleware import trace_id_context except ImportError: from contextvars import ContextVar + trace_id_context: ContextVar[str] = ContextVar("trace_id", default="") diff --git a/apps/pre-processing-service/app/middleware/loki_logger.py b/apps/pre-processing-service/app/middleware/loki_logger.py index 7870c563..0a4d603c 100644 --- a/apps/pre-processing-service/app/middleware/loki_logger.py +++ b/apps/pre-processing-service/app/middleware/loki_logger.py @@ -39,7 +39,7 @@ async def send_log( status: Optional[str] = None, duration_ms: Optional[int] = None, error_code: Optional[str] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """ Loki로 로그 전송 @@ -71,7 +71,7 @@ async def send_log( status=status, duration_ms=duration_ms, error_code=error_code, - reserved4=additional_data + reserved4=additional_data, ) loki_data = execution_log.to_loki_format(self.app_name) @@ -84,9 +84,9 @@ async def send_log( "values": [ [ str(loki_data["log"]["timestamp"]), - json.dumps(loki_data["log"], ensure_ascii=False) + json.dumps(loki_data["log"], ensure_ascii=False), ] - ] + ], } ] } @@ -97,14 +97,16 @@ async def send_log( self.loki_url, json=payload, headers={"Content-Type": "application/json"}, - 
timeout=aiohttp.ClientTimeout(total=5) + timeout=aiohttp.ClientTimeout(total=5), ) as response: if response.status == 204: # logger.debug(f"Loki 로그 전송 성공: {execution_type} - {log_message[:50]}...") return True else: response_text = await response.text() - logger.error(f"Loki 로그 전송 실패: status={response.status}, response={response_text}") + logger.error( + f"Loki 로그 전송 실패: status={response.status}, response={response_text}" + ) return False except asyncio.TimeoutError: @@ -121,7 +123,7 @@ async def log_start( trace_id: str, log_message: str, run_id: Optional[int] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """시작 로그 전송""" return await self.send_log( @@ -132,7 +134,7 @@ async def log_start( trace_id=trace_id, run_id=run_id, status="RUNNING", - additional_data=additional_data + additional_data=additional_data, ) async def log_success( @@ -143,7 +145,7 @@ async def log_success( log_message: str, duration_ms: int, run_id: Optional[int] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """성공 로그 전송""" return await self.send_log( @@ -155,7 +157,7 @@ async def log_success( run_id=run_id, status="SUCCESS", duration_ms=duration_ms, - additional_data=additional_data + additional_data=additional_data, ) async def log_error( @@ -167,7 +169,7 @@ async def log_error( error_code: str, duration_ms: Optional[int] = None, run_id: Optional[int] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """에러 로그 전송""" return await self.send_log( @@ -180,7 +182,7 @@ async def log_error( status="ERROR", duration_ms=duration_ms, error_code=error_code, - additional_data=additional_data + additional_data=additional_data, ) def __del__(self): @@ -193,4 +195,4 @@ def __del__(self): else: loop.run_until_complete(self.session.close()) except: - pass \ No newline at end of file + pass diff --git 
a/apps/pre-processing-service/app/middleware/rds_logger.py b/apps/pre-processing-service/app/middleware/rds_logger.py index f483a1fc..66bad19c 100644 --- a/apps/pre-processing-service/app/middleware/rds_logger.py +++ b/apps/pre-processing-service/app/middleware/rds_logger.py @@ -24,7 +24,7 @@ async def log_execution( status: Optional[str] = None, duration_ms: Optional[int] = None, error_code: Optional[str] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """ execution_log 테이블에 로그 저장 @@ -56,7 +56,7 @@ async def log_execution( status=status, duration_ms=duration_ms, error_code=error_code, - reserved4=additional_data + reserved4=additional_data, ) log_data = execution_log.to_dict() @@ -64,8 +64,8 @@ async def log_execution( # 컬럼명과 값 분리 columns = list(log_data.keys()) values = list(log_data.values()) - placeholders = ', '.join(['%s'] * len(values)) - columns_str = ', '.join(columns) + placeholders = ", ".join(["%s"] * len(values)) + columns_str = ", ".join(columns) insert_query = f""" INSERT INTO execution_log ({columns_str}) @@ -90,7 +90,7 @@ async def log_start( trace_id: str, log_message: str, run_id: Optional[int] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """시작 로그 저장""" return await self.log_execution( @@ -101,7 +101,7 @@ async def log_start( trace_id=trace_id, run_id=run_id, status="RUNNING", - additional_data=additional_data + additional_data=additional_data, ) async def log_success( @@ -112,7 +112,7 @@ async def log_success( log_message: str, duration_ms: int, run_id: Optional[int] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """성공 로그 저장""" return await self.log_execution( @@ -124,7 +124,7 @@ async def log_success( run_id=run_id, status="SUCCESS", duration_ms=duration_ms, - additional_data=additional_data + additional_data=additional_data, ) async def log_error( @@ -136,7 +136,7 @@ async def 
log_error( error_code: str, duration_ms: Optional[int] = None, run_id: Optional[int] = None, - additional_data: Optional[dict] = None + additional_data: Optional[dict] = None, ) -> bool: """에러 로그 저장""" return await self.log_execution( @@ -149,5 +149,5 @@ async def log_error( status="ERROR", duration_ms=duration_ms, error_code=error_code, - additional_data=additional_data - ) \ No newline at end of file + additional_data=additional_data, + ) diff --git a/apps/pre-processing-service/app/model/execution_log.py b/apps/pre-processing-service/app/model/execution_log.py index b8358d51..c1bef2e0 100644 --- a/apps/pre-processing-service/app/model/execution_log.py +++ b/apps/pre-processing-service/app/model/execution_log.py @@ -13,7 +13,7 @@ class ExecutionLog: log_level: str # INFO, ERROR, WARNING, DEBUG executed_at: datetime log_message: str - span_id: str =""#테스트값 + span_id: str = "" # 테스트값 trace_id: Optional[str] = None run_id: Optional[int] = None status: Optional[str] = None # SUCCESS, ERROR, RUNNING, PENDING @@ -29,25 +29,27 @@ class ExecutionLog: def to_dict(self) -> Dict[str, Any]: """딕셔너리로 변환 (DB 삽입용)""" data = { - 'execution_type': self.execution_type, - 'source_id': self.source_id, - 'log_level': self.log_level, - 'executed_at': self.executed_at, - 'log_message': self.log_message, - 'trace_id': self.trace_id, - 'run_id': self.run_id, - 'status': self.status, - 'duration_ms': self.duration_ms, - 'error_code': self.error_code, - 'reserved1': self.span_id, - 'reserved2': self.reserved2, - 'reserved3': self.reserved3, - 'reserved4': json.dumps(self.reserved4) if self.reserved4 else None, - 'reserved5': self.reserved5 + "execution_type": self.execution_type, + "source_id": self.source_id, + "log_level": self.log_level, + "executed_at": self.executed_at, + "log_message": self.log_message, + "trace_id": self.trace_id, + "run_id": self.run_id, + "status": self.status, + "duration_ms": self.duration_ms, + "error_code": self.error_code, + "reserved1": self.span_id, + 
"reserved2": self.reserved2, + "reserved3": self.reserved3, + "reserved4": json.dumps(self.reserved4) if self.reserved4 else None, + "reserved5": self.reserved5, } return {k: v for k, v in data.items() if v is not None} - def to_loki_format(self, app_name: str = "pre-processing-service") -> Dict[str, Any]: + def to_loki_format( + self, app_name: str = "pre-processing-service" + ) -> Dict[str, Any]: """Loki 형식으로 변환""" labels = { @@ -57,7 +59,7 @@ def to_loki_format(self, app_name: str = "pre-processing-service") -> Dict[str, "spanId": self.span_id, # 필요시 추가 "executionType": self.execution_type, "sourceId": str(self.source_id), - "runId": str(self.run_id) if self.run_id else "" + "runId": str(self.run_id) if self.run_id else "", } log_data = { @@ -68,13 +70,10 @@ def to_loki_format(self, app_name: str = "pre-processing-service") -> Dict[str, "source_id": self.source_id, "status": self.status, "duration_ms": self.duration_ms, - "error_code": self.error_code + "error_code": self.error_code, } if self.reserved4: log_data.update(self.reserved4) - return { - "labels": labels, - "log": log_data - } \ No newline at end of file + return {"labels": labels, "log": log_data} diff --git a/apps/pre-processing-service/app/service/keyword_service.py b/apps/pre-processing-service/app/service/keyword_service.py index 0028fd6c..b6341482 100644 --- a/apps/pre-processing-service/app/service/keyword_service.py +++ b/apps/pre-processing-service/app/service/keyword_service.py @@ -90,7 +90,9 @@ async def search_naver_rank() -> dict[int, str]: httpx.RequestError, json.JSONDecodeError, ) as e: - logger.error(f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}") + logger.error( + f"네이버 데이터랩에서 데이터를 가져오는 데 실패했습니다: {e}" + ) raise InvalidItemDataException return keywords_dic