From c0c4e0ed7a07baaa43a9766d147c7a0a641d48c3 Mon Sep 17 00:00:00 2001
From: kakusiA
Date: Fri, 19 Sep 2025 12:11:45 +0900
Subject: [PATCH 1/2] refactor: redesign the integration test code to match
 the current logic
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../app/api/endpoints/test.py    | 71 +++++++++++--------
 .../app/model/schemas.py         |  6 --
 .../app/service/crawl_service.py |  1 -
 3 files changed, 41 insertions(+), 37 deletions(-)

diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py
index 91977a3f..6505bec6 100644
--- a/apps/pre-processing-service/app/api/endpoints/test.py
+++ b/apps/pre-processing-service/app/api/endpoints/test.py
@@ -1,4 +1,6 @@
 # app/api/endpoints/embedding.py
+from time import sleep
+
 import loguru
 from fastapi import APIRouter
 from sqlalchemy import text
@@ -8,14 +10,17 @@ from fastapi import APIRouter
 from typing import Mapping, Any, Dict
 
 from ...model.schemas import *
+from ...service.blog.blog_create_service import BlogContentService
 from ...service.blog.naver_blog_post_service import NaverBlogPostService
 from ...service.blog.tistory_blog_post_service import TistoryBlogPostService
+from ...service.crawl_service import CrawlService
 from ...service.keyword_service import keyword_search
 from ...service.match_service import MatchService
 from ...service.search_service import SearchService
 
 # from ...service.similarity_service import SimilarityService
 from ...db.db_connecter import engine  # ✅ import of our own DB utility
+from ...service.similarity_service import SimilarityService
 
 # Create an independent router for this file only.
 router = APIRouter()
@@ -62,54 +67,60 @@ def with_meta(data: Mapping[str, Any], meta: Mapping[str, Any]) -> Dict[str, Any
 
 @router.get("/tester", response_model=None)
 async def processing_tester():
-    request_dict = {
-        "tag": "naver",
-        "category": "50000000",
-        "start_date": "2025-09-01",
-        "end_date": "2025-09-02",
-    }
     # Naver keyword search
-    naver_request = RequestNaverSearch(**with_meta(request_dict))
+    naver_request = RequestNaverSearch(tag="naver")
     response_data = await keyword_search(naver_request)
-    keyword = response_data.get("keyword")
+    keyword = response_data["data"].get("keyword")
     loguru.logger.info(keyword)
 
-    keyword = {
-        "keyword": keyword,
-    }
-
     # Sadagu product search
-    sadagu_request = RequestSadaguSearch(**with_meta(keyword))
-    search_service = SearchService()
-    keyword_result = await search_service.search_products(sadagu_request)
+    sadagu_request = RequestSadaguSearch(keyword=keyword)
+    searchservice = SearchService()
+    keyword_result = await searchservice.search_products(sadagu_request)
     loguru.logger.info(keyword_result)
 
     # Sadagu product matching
-    keyword["search_results"] = keyword_result.get("search_results")
-    keyword_match_request = RequestSadaguMatch(**with_meta(keyword))
+
+    data = keyword_result["data"]
+    keyword_match_request = RequestSadaguMatch(keyword=data.get("keyword"),search_results=data.get("search_results"))
     match_service = MatchService()
     keyword_match_response = match_service.match_products(keyword_match_request)
     loguru.logger.info(keyword_match_response)
 
     # Sadagu product similarity analysis
-    keyword["matched_products"] = keyword_match_response.get("matched_products")
-    keyword_similarity_request = RequestSadaguSimilarity(**with_meta(keyword))
-    # similarity_service = SimilarityService()
-    # keyword_similarity_response = similarity_service.select_product_by_similarity(
-    #     keyword_similarity_request
-    # )
-    # loguru.logger.info(keyword_similarity_response)
-
+    data = keyword_match_response["data"]
+    keyword_similarity_request = RequestSadaguSimilarity(keyword=data.get("keyword"),matched_products=data.get("matched_products"))
+    similarity_service = SimilarityService()
+    keyword_similarity_response = similarity_service.select_product_by_similarity(
+        keyword_similarity_request
+    )
+    loguru.logger.info(keyword_similarity_response)
+    sleep(5)
     # Sadagu product crawling
+    a = RequestSadaguCrawl(product_url=keyword_similarity_response["data"]["selected_product"].get("url"))
+    crawl = CrawlService()
+    crawl_response = await crawl.crawl_product_detail(a)
+    loguru.logger.info(crawl_response)
 
+    sleep(5)
     # Blog generation
+    data = crawl_response
+    rag= RequestBlogCreate(product_info=data.get("product_detail"),target_length=500)
+    blog_service = BlogContentService()
+    rag_data = blog_service.generate_blog_content(rag)
+    loguru.logger.info(rag_data)
 
+    sleep(15)
     # Blog publishing
-    tistory_service = TistoryBlogPostService()
-    result = tistory_service.post_content(
-        title="안녕하살법",
-        content="안녕하살법 받아치기러기 코드 받아치기",
-        tags=["퉁퉁퉁사후르", "짜라짜라"],
+    data = rag_data
+    # tistory_service = TistoryBlogPostService()
+    naverblogPostService = NaverBlogPostService()
+    result = naverblogPostService.post_content(
+        # blog_id="wtecho331",
+        # blog_pw="wt505033@#",
+        title=data.get("title"),
+        content=data.get("content"),
+        tags=data.get("tags"),
     )
     loguru.logger.info(result)
diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py
index 18d0d99f..36bef959 100644
--- a/apps/pre-processing-service/app/model/schemas.py
+++ b/apps/pre-processing-service/app/model/schemas.py
@@ -129,11 +129,6 @@ class ResponseSadaguSimilarity(ResponseBase[SadaguSimilarityData]):
 
 
 class RequestSadaguCrawl(RequestBase):
-    tag: str = Field(
-        ...,
-        title="크롤링 태그",
-        description="크롤링 유형을 구분하는 태그 (예: 'detail')",
-    )
     product_url: HttpUrl = Field(
         ..., title="상품 URL", description="크롤링할 상품 페이지의 URL"
     )
@@ -141,7 +136,6 @@ class RequestSadaguCrawl(RequestBase):
 
 # Response data model
 class SadaguCrawlData(BaseModel):
-    tag: str = Field(..., title="크롤링 태그", description="크롤링 유형 태그")
     product_url: str = Field(..., title="상품 URL", description="크롤링된 상품 URL")
     product_detail: Optional[Dict] = Field(
         None, title="상품 상세정보", description="크롤링된 상품의 상세 정보"
diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py
index f54d4db7..df90ba01 100644
--- a/apps/pre-processing-service/app/service/crawl_service.py
+++ b/apps/pre-processing-service/app/service/crawl_service.py
@@ -38,7 +38,6 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict:
 
         # Compose the response data
         data = {
-            "tag": request.tag,
             "product_url": str(request.product_url),
             "product_detail": product_detail,
             "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"),

From cfd662276d00a2e1109feb90d23234399b0d5d09 Mon Sep 17 00:00:00 2001
From: kakusiA
Date: Fri, 19 Sep 2025 12:14:15 +0900
Subject: [PATCH 2/2] style: FastAPI test code formatting

---
 .../app/api/endpoints/test.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/apps/pre-processing-service/app/api/endpoints/test.py b/apps/pre-processing-service/app/api/endpoints/test.py
index 6505bec6..ca1a43b5 100644
--- a/apps/pre-processing-service/app/api/endpoints/test.py
+++ b/apps/pre-processing-service/app/api/endpoints/test.py
@@ -82,14 +82,18 @@ async def processing_tester():
     # Sadagu product matching
 
     data = keyword_result["data"]
-    keyword_match_request = RequestSadaguMatch(keyword=data.get("keyword"),search_results=data.get("search_results"))
+    keyword_match_request = RequestSadaguMatch(
+        keyword=data.get("keyword"), search_results=data.get("search_results")
+    )
     match_service = MatchService()
     keyword_match_response = match_service.match_products(keyword_match_request)
     loguru.logger.info(keyword_match_response)
 
     # Sadagu product similarity analysis
     data = keyword_match_response["data"]
-    keyword_similarity_request = RequestSadaguSimilarity(keyword=data.get("keyword"),matched_products=data.get("matched_products"))
+    keyword_similarity_request = RequestSadaguSimilarity(
+        keyword=data.get("keyword"), matched_products=data.get("matched_products")
+    )
     similarity_service = SimilarityService()
     keyword_similarity_response = similarity_service.select_product_by_similarity(
         keyword_similarity_request
@@ -97,7 +101,9 @@ async def processing_tester():
     loguru.logger.info(keyword_similarity_response)
     sleep(5)
     # Sadagu product crawling
-    a = RequestSadaguCrawl(product_url=keyword_similarity_response["data"]["selected_product"].get("url"))
+    a = RequestSadaguCrawl(
+        product_url=keyword_similarity_response["data"]["selected_product"].get("url")
+    )
     crawl = CrawlService()
     crawl_response = await crawl.crawl_product_detail(a)
     loguru.logger.info(crawl_response)
@@ -105,7 +111,7 @@ async def processing_tester():
     sleep(5)
     # Blog generation
     data = crawl_response
-    rag= RequestBlogCreate(product_info=data.get("product_detail"),target_length=500)
+    rag = RequestBlogCreate(product_info=data.get("product_detail"), target_length=500)
     blog_service = BlogContentService()
     rag_data = blog_service.generate_blog_content(rag)
     loguru.logger.info(rag_data)
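
Note: processing_tester is declared with async def, but both commits pause between pipeline steps with the blocking time.sleep, which stalls FastAPI's event loop for the entire wait. Below is a minimal sketch of the non-blocking alternative, assuming the same endpoint shape; the route path, function name, and the stubbed-out step comments are illustrative only and not part of the patch.

import asyncio

from fastapi import APIRouter

router = APIRouter()


@router.get("/tester-sketch", response_model=None)
async def processing_tester_sketch():
    # ... keyword search -> product search -> match -> similarity, as in the patch ...
    await asyncio.sleep(5)   # non-blocking pause before crawling the selected product
    # ... crawl the product detail and generate the blog content ...
    await asyncio.sleep(15)  # non-blocking pause before publishing the post
    return {"status": "ok"}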