From d2c933697748e48439129c62e37362907d1d8409 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EA=B2=BD=EB=AF=BC?= <153978154+kakusiA@users.noreply.github.com> Date: Tue, 23 Sep 2025 18:30:44 +0900 Subject: [PATCH 01/18] =?UTF-8?q?=EC=9B=8C=ED=81=AC=ED=94=8C=EB=A1=9C?= =?UTF-8?q?=EC=9A=B0=20default=5Fconfig=20=EC=82=AC=EC=9A=A9=20=EB=A1=9C?= =?UTF-8?q?=EC=A7=81=EA=B0=9C=EB=B0=9C=20(#190)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :fix - 스케줄 스키마 유니크 제약조건 제거 - workflow insert문 default_config 로직에 맞게 수정 * refactor: 워크 플로우 하드코딩 제거 1. workflow 테이블에서 default_config값 가져오기 2. 각 task_id에맞게 config값이 setting에 들어가도록 수정 3. 각각의 taskBuilder에 하드코딩된값 변경 * fix: 코드 머지후 다시 코드 작성 * style: springBoot 코드 포맷팅 * fix:v0.0.5 Alter 스키마 삭제 --- .../icebang/domain/workflow/dto/TaskDto.java | 4 +-- .../icebang/domain/workflow/model/Task.java | 3 ++ .../fastapi/body/BlogPublishBodyBuilder.java | 9 +++--- .../body/KeywordSearchBodyBuilder.java | 3 +- .../service/WorkflowExecutionService.java | 30 ++++++++++++++----- .../src/main/resources/sql/01-schema.sql | 2 ++ .../main/resources/sql/03-insert-workflow.sql | 2 +- 7 files changed, 38 insertions(+), 15 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java index 569e93dc..fa83fe7d 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java @@ -11,9 +11,9 @@ public class TaskDto { private Long id; private String name; private String type; + private Integer executionOrder; + private JsonNode settings; private JsonNode parameters; private LocalDateTime createdAt; private LocalDateTime updatedAt; - - private Integer executionOrder; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java index 713e460f..2c917100 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java @@ -24,6 +24,8 @@ public class Task { /** Task 실행에 필요한 파라미터 (JSON) 예: {"url": "http://...", "method": "POST", "body": {...}} */ private JsonNode parameters; + private JsonNode settings; + private LocalDateTime createdAt; private LocalDateTime updatedAt; @@ -32,6 +34,7 @@ public Task(TaskDto taskDto) { this.id = taskDto.getId(); this.name = taskDto.getName(); this.type = taskDto.getType(); + this.settings = taskDto.getSettings(); this.parameters = taskDto.getParameters(); } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogPublishBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogPublishBodyBuilder.java index d0967857..ed148061 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogPublishBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogPublishBodyBuilder.java @@ -49,10 +49,11 @@ public ObjectNode build(Task task, Map workflowContext) { .filter(node -> !node.isMissingNode()) .ifPresent(tagsNode -> body.set("post_tags", tagsNode)); }); - - body.put("tag", "Blogger"); - body.put("blog_id", ""); - body.put("blog_pw", ""); + String blog_name = task.getSettings().path("blog_name").asText(""); + body.put("tag", task.getSettings().get("tag").asText()); + body.put("blog_name", blog_name); + body.put("blog_id", task.getSettings().get("blog_id").asText()); + body.put("blog_pw", task.getSettings().get("blog_pw").asText()); return body; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/KeywordSearchBodyBuilder.java 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/KeywordSearchBodyBuilder.java index 17add786..597ab0b7 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/KeywordSearchBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/KeywordSearchBodyBuilder.java @@ -27,6 +27,7 @@ public boolean supports(String taskName) { @Override public ObjectNode build(Task task, Map workflowContext) { // 이 Task는 항상 정적인 Body를 가집니다. - return objectMapper.createObjectNode().put("tag", "naver"); + String tag = task.getSettings().get("tag").asText(); + return objectMapper.createObjectNode().put("tag", tag); } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java index a27807ec..c434cb6f 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java @@ -1,5 +1,6 @@ package site.icebang.domain.workflow.service; +import java.math.BigInteger; import java.util.Comparator; import java.util.HashMap; import java.util.List; @@ -11,6 +12,8 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -19,11 +22,9 @@ import site.icebang.domain.workflow.dto.JobDto; import site.icebang.domain.workflow.dto.TaskDto; +import site.icebang.domain.workflow.dto.WorkflowDetailCardDto; import site.icebang.domain.workflow.manager.ExecutionMdcManager; 
-import site.icebang.domain.workflow.mapper.JobMapper; -import site.icebang.domain.workflow.mapper.JobRunMapper; -import site.icebang.domain.workflow.mapper.TaskRunMapper; -import site.icebang.domain.workflow.mapper.WorkflowRunMapper; +import site.icebang.domain.workflow.mapper.*; import site.icebang.domain.workflow.model.Job; import site.icebang.domain.workflow.model.JobRun; import site.icebang.domain.workflow.model.Task; @@ -44,6 +45,7 @@ public class WorkflowExecutionService { private final List bodyBuilders; private final ExecutionMdcManager mdcManager; private final TaskExecutionService taskExecutionService; + private final WorkflowMapper workflowMapper; @Transactional @Async("traceExecutor") @@ -55,7 +57,9 @@ public void executeWorkflow(Long workflowId) { workflowRunMapper.insert(workflowRun); Map workflowContext = new HashMap<>(); - + WorkflowDetailCardDto settings = + workflowMapper.selectWorkflowDetailById(BigInteger.valueOf(workflowId)); + JsonNode setting = objectMapper.readTree(settings.getDefaultConfig()); // 📌 Mapper로부터 JobDto 리스트를 조회합니다. List jobDtos = jobMapper.findJobsByWorkflowId(workflowId); // 📌 JobDto를 execution_order 기준으로 정렬합니다. @@ -78,7 +82,7 @@ public void executeWorkflow(Long workflowId) { workflowLogger.info( "---------- Job 실행 시작: JobId={}, JobRunId={} ----------", job.getId(), jobRun.getId()); - boolean jobSucceeded = executeTasksForJob(jobRun, workflowContext); + boolean jobSucceeded = executeTasksForJob(jobRun, workflowContext, setting); jobRun.finish(jobSucceeded ? "SUCCESS" : "FAILED"); jobRunMapper.update(jobRun); @@ -96,13 +100,25 @@ public void executeWorkflow(Long workflowId) { "========== 워크플로우 실행 {} : WorkflowRunId={} ==========", hasAnyJobFailed ? 
"실패" : "성공", workflowRun.getId()); + } catch (JsonMappingException e) { + throw new RuntimeException(e); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); } finally { mdcManager.clearExecutionContext(); } } - private boolean executeTasksForJob(JobRun jobRun, Map workflowContext) { + private boolean executeTasksForJob( + JobRun jobRun, Map workflowContext, JsonNode setting) { List taskDtos = jobMapper.findTasksByJobId(jobRun.getJobId()); + for (TaskDto taskDto : taskDtos) { + String taskId = taskDto.getId().toString(); + JsonNode settingForTask = setting.get(taskId); + if (settingForTask != null) { + taskDto.setSettings(settingForTask); + } + } taskDtos.sort( Comparator.comparing( TaskDto::getExecutionOrder, Comparator.nullsLast(Comparator.naturalOrder())) diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/01-schema.sql index 31242c33..35d42e59 100644 --- a/apps/user-service/src/main/resources/sql/01-schema.sql +++ b/apps/user-service/src/main/resources/sql/01-schema.sql @@ -333,3 +333,5 @@ CREATE INDEX idx_log_level_status ON execution_log(log_level, status); CREATE INDEX idx_error_code ON execution_log(error_code); CREATE INDEX idx_duration ON execution_log(duration_ms); CREATE INDEX idx_execution_type_source ON execution_log(execution_type, source_id); + + diff --git a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql index 0660b31f..9238b8a2 100644 --- a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql +++ b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql @@ -16,7 +16,7 @@ DELETE FROM `workflow`; -- 워크플로우 생성 (ID: 1) INSERT INTO `workflow` (`id`, `name`, `description`, `created_by`, `default_config`) VALUES (1, '상품 분석 및 블로그 자동 발행', '키워드 검색부터 상품 분석 후 블로그 발행까지의 자동화 프로세스', 1, - 
JSON_OBJECT('keyword_search',json_object('tag','naver'),'blog_publish',json_object('tag','naver_blog','blog_id', 'wtecho331', 'blog_pw', 'testpass'))) + JSON_OBJECT('1',json_object('tag','naver'),'8',json_object('tag','naver_blog','blog_id', 'wtecho331', 'blog_pw', 'testpass'))) ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), From b09558fd86a55fb1c5d982ca6332ee3270427f4d Mon Sep 17 00:00:00 2001 From: bwnfo3 <142577603+bwnfo3@users.noreply.github.com> Date: Thu, 25 Sep 2025 13:07:48 +0900 Subject: [PATCH 02/18] =?UTF-8?q?check-session,=20permissons=20api?= =?UTF-8?q?=EC=97=90=20=EB=8C=80=ED=95=9C=20=ED=85=8C=EC=8A=A4=ED=8A=B8=20?= =?UTF-8?q?=EC=BD=94=EB=93=9C=20=EC=9E=91=EC=84=B1=20(#200)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/auth/AuthApiIntegrationTest.java | 236 ++++++++++++++++++ 1 file changed, 236 insertions(+) diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java index 4fe3b00d..b6e0e237 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java @@ -2,6 +2,7 @@ import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; import static org.springframework.restdocs.payload.PayloadDocumentation.*; @@ -138,4 +139,239 @@ void logout_success() throws Exception { .description("HTTP 상태")) .build()))); } + + @Test + 
@DisplayName("세션 확인 - 인증된 사용자") + void checkSession_authenticated_success() throws Exception { + // given - 먼저 로그인하여 세션 생성 + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + MockHttpSession session = new MockHttpSession(); + + // 로그인 먼저 수행 + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/login")) + .contentType(MediaType.APPLICATION_JSON) + .session(session) + .content(objectMapper.writeValueAsString(loginRequest))) + .andExpect(status().isOk()); + + // when & then - 세션 확인 수행 + mockMvc + .perform( + get(getApiUrlForDocs("/v0/auth/check-session")) + .session(session) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").value(true)) + .andDo( + document( + "auth-check-session-authenticated", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("세션 확인 - 인증된 상태") + .description("현재 사용자의 인증 세션이 유효한지 확인합니다 (인증된 경우)") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data") + .type(JsonFieldType.BOOLEAN) + .description("세션 유효 여부 (인증된 경우 true)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("세션 확인 - 미인증 사용자") + void checkSession_unauthenticated_returns_unauthorized() throws Exception { + // given - 세션 없이 요청 (미인증 상태) + + // when & then - 세션 확인 수행 + mockMvc + .perform( + get(getApiUrlForDocs("/v0/auth/check-session")) + .header("Origin", "https://admin.icebang.site") + .header("Referer", 
"https://admin.icebang.site/")) + .andExpect(status().isUnauthorized()) + .andExpect(jsonPath("$.success").value(false)) + .andExpect(jsonPath("$.status").value("UNAUTHORIZED")) + .andExpect(jsonPath("$.message").value("Authentication required")) + .andExpect(jsonPath("$.data").isEmpty()) + .andDo( + document( + "auth-check-session-unauthenticated", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("세션 확인 - 미인증 상태") + .description("인증되지 않은 상태에서 세션 확인 시 401 Unauthorized를 반환합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부 (미인증 시 false)"), + fieldWithPath("data") + .type(JsonFieldType.NULL) // BOOLEAN -> NULL로 변경 + .description("응답 데이터 (미인증 시 null)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지 (Authentication required)"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태 (UNAUTHORIZED)")) + .build()))); + } + + @Test + @DisplayName("권한 정보 조회 - 인증된 사용자") + void getPermissions_authenticated_success() throws Exception { + // given - 먼저 로그인하여 세션 생성 + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + MockHttpSession session = new MockHttpSession(); + + // 로그인 먼저 수행 + mockMvc + .perform( + post(getApiUrlForDocs("/v0/auth/login")) + .contentType(MediaType.APPLICATION_JSON) + .session(session) + .content(objectMapper.writeValueAsString(loginRequest))) + .andExpect(status().isOk()); + + // when & then - 권한 정보 조회 수행 + mockMvc + .perform( + get(getApiUrlForDocs("/v0/auth/permissions")) + .session(session) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + 
.andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").isNotEmpty()) + .andExpect(jsonPath("$.data.email").value("admin@icebang.site")) + .andDo( + document( + "auth-get-permissions-authenticated", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("권한 정보 조회 - 인증된 상태") + .description("현재 인증된 사용자의 상세 정보와 권한을 조회합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data") + .type(JsonFieldType.OBJECT) + .description("사용자 인증 정보"), + fieldWithPath("data.id") + .type(JsonFieldType.NUMBER) + .description("사용자 고유 ID"), + fieldWithPath("data.email") + .type(JsonFieldType.STRING) + .description("사용자 이메일 주소"), + fieldWithPath("data.password") + .type(JsonFieldType.STRING) + .description("사용자 비밀번호"), + fieldWithPath("data.status") + .type(JsonFieldType.STRING) + .description("사용자 계정 상태"), + fieldWithPath("data.roles") + .type(JsonFieldType.ARRAY) + .description("사용자 권한 목록"), + fieldWithPath("data.enabled") + .type(JsonFieldType.BOOLEAN) + .description("계정 활성화 여부"), + fieldWithPath("data.username") + .type(JsonFieldType.STRING) + .description("사용자명 (이메일과 동일)"), + fieldWithPath("data.authorities") + .type(JsonFieldType.ARRAY) + .description("Spring Security 권한 목록"), + fieldWithPath("data.authorities[].authority") + .type(JsonFieldType.STRING) + .description("개별 권한"), + fieldWithPath("data.credentialsNonExpired") + .type(JsonFieldType.BOOLEAN) + .description("자격증명 만료 여부"), + fieldWithPath("data.accountNonExpired") + .type(JsonFieldType.BOOLEAN) + .description("계정 만료 여부"), + fieldWithPath("data.accountNonLocked") + .type(JsonFieldType.BOOLEAN) + .description("계정 잠금 여부"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + 
@DisplayName("권한 정보 조회 - 미인증 사용자") + void getPermissions_unauthenticated_returns_unauthorized() throws Exception { + // given - 세션 없이 요청 (미인증 상태) + + // when & then - 권한 정보 조회 수행 (401 Unauthorized 응답 예상) + mockMvc + .perform( + get(getApiUrlForDocs("/v0/auth/permissions")) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isUnauthorized()) + .andExpect(jsonPath("$.success").value(false)) + .andExpect(jsonPath("$.status").value("UNAUTHORIZED")) + .andExpect(jsonPath("$.message").value("Authentication required")) + .andExpect(jsonPath("$.data").isEmpty()) + .andDo( + document( + "auth-get-permissions-unauthenticated", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Authentication") + .summary("권한 정보 조회 - 미인증 상태") + .description("인증되지 않은 상태에서 권한 정보 조회 시 401 Unauthorized를 반환합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부 (미인증 시 false)"), + fieldWithPath("data") + .type(JsonFieldType.NULL) + .description("응답 데이터 (미인증 시 null)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지 (Authentication required)"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태 (UNAUTHORIZED)")) + .build()))); + } } From 0fd54d66959a0eb63bceabcf160b83b06fd78f3b Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 25 Sep 2025 15:34:10 +0900 Subject: [PATCH 03/18] =?UTF-8?q?feat:=20S3=20=ED=95=98=EA=B3=A0=20RDB=20?= =?UTF-8?q?=EC=97=B0=EB=8F=99=201.=20body=20builder=20=EC=B6=94=EA=B0=80?= =?UTF-8?q?=202.=20s3=5Fupload=20+=20=EC=9E=84=EC=8B=9C=201=EA=B0=9C=20?= =?UTF-8?q?=EB=BD=91=EA=B8=B0=20->=20rag=5Fcreate=20=EB=A5=BC=20s3=5Fuploa?= =?UTF-8?q?d=20+=20rdb=20=EB=8D=B0=EC=9D=B4=ED=84=B0=20=EC=82=BD=EC=9E=85?= =?UTF-8?q?=20->=20=EC=9E=84=EC=8B=9C=201=EA=B0=9C=20=EB=BD=91=EA=B8=B0=20?= 
=?UTF-8?q?->=20rag=5Fcreate=EB=A1=9C=20=EB=B3=80=EA=B2=BD=203.=20?= =?UTF-8?q?=EB=B3=80=EA=B2=BD=EC=97=90=20=EB=94=B0=EB=9D=BC=20schemas=20?= =?UTF-8?q?=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/api/endpoints/product.py | 17 ++ .../app/model/schemas.py | 28 ++- .../app/service/product_selection_service.py | 191 ++++++++++++++++ .../app/service/s3_upload_service.py | 204 +++++++++--------- .../fastapi/body/BlogRagBodyBuilder.java | 66 +++--- .../body/ProductSelectBodyBuilder.java | 40 ++++ .../fastapi/body/S3UploadBodyBuilder.java | 62 +++--- 7 files changed, 434 insertions(+), 174 deletions(-) create mode 100644 apps/pre-processing-service/app/service/product_selection_service.py create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index 2812ef79..0b9e888f 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -10,6 +10,7 @@ from ...service.search_service import SearchService from ...service.match_service import MatchService from ...service.similarity_service import SimilarityService +from ...service.product_selection_service import ProductSelectionService # from ...service.similarity_service import SimilarityService @@ -121,3 +122,19 @@ async def s3_upload(request: RequestS3Upload): raise HTTPException(status_code=e.status_code, detail=e.detail) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) + +@router.post("/select", response_model=ResponseProductSelect, summary="콘텐츠용 상품 선택") +def select_product(request: RequestProductSelect): # async 제거 + """ + S3 업로드 완료 후 콘텐츠 생성을 위한 최적 상품을 선택합니다. 
+ """ + try: + selection_service = ProductSelectionService() + response_data = selection_service.select_product_for_content(request) # await 제거 + + if not response_data: + raise CustomException(500, "상품 선택에 실패했습니다.", "PRODUCT_SELECTION_FAILED") + + return response_data + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index dd49cf44..a555dfe5 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -161,7 +161,7 @@ class ResponseSadaguCrawl(ResponseBase[SadaguCrawlData]): pass -# ============== S3 이미지 업로드 ============== +# ============== S3 업로드 ============== class RequestS3Upload(RequestBase): @@ -227,12 +227,6 @@ class S3UploadData(BaseModel): uploaded_at: str = Field( ..., title="업로드 완료 시간", description="S3 업로드 완료 시간" ) - # 🆕 임시: 콘텐츠 생성용 단일 상품만 추가 (나중에 삭제 예정) - selected_product_for_content: Optional[Dict] = Field( - None, - title="콘텐츠 생성용 선택 상품", - description="임시: 블로그 콘텐츠 생성을 위해 선택된 단일 상품 정보", - ) # 최종 응답 모델 @@ -241,6 +235,26 @@ class ResponseS3Upload(ResponseBase[S3UploadData]): pass +# ============== 상품 선택 (새로 추가) ============== + +class RequestProductSelect(RequestBase): + task_run_id: int = Field(..., title="Task Run ID", description="상품을 선택할 task_run_id") + selection_criteria: Optional[str] = Field( + None, title="선택 기준", description="특별한 선택 기준 (기본: 이미지 개수 우선)" + ) + + +# 응답 데이터 모델 +class ProductSelectData(BaseModel): + task_run_id: int = Field(..., title="Task Run ID") + selected_product: Dict = Field(..., title="선택된 상품", description="콘텐츠 생성용으로 선택된 상품") + total_available_products: int = Field(..., title="전체 상품 수", description="선택 가능했던 전체 상품 개수") + + +# 최종 응답 모델 +class ResponseProductSelect(ResponseBase[ProductSelectData]): + """상품 선택 API 응답""" + pass # ============== 블로그 콘텐츠 생성 ============== diff --git 
a/apps/pre-processing-service/app/service/product_selection_service.py b/apps/pre-processing-service/app/service/product_selection_service.py new file mode 100644 index 00000000..96093707 --- /dev/null +++ b/apps/pre-processing-service/app/service/product_selection_service.py @@ -0,0 +1,191 @@ +import json +from typing import List, Dict +from loguru import logger +from app.model.schemas import RequestProductSelect +from app.utils.response import Response +from app.db.mariadb_manager import MariadbManager + + +class ProductSelectionService: + """콘텐츠 생성용 단일 상품 선택 서비스""" + + def __init__(self): + self.db_manager = MariadbManager() + + def select_product_for_content(self, request: RequestProductSelect) -> dict: + """ + S3 업로드와 DB 저장 결과를 바탕으로 콘텐츠 생성용 단일 상품을 선택 + """ + try: + task_run_id = request.task_run_id + logger.info(f"콘텐츠용 상품 선택 시작: task_run_id={task_run_id}") + + # 1. DB에서 해당 task_run_id의 모든 상품 조회 + db_products = self._fetch_products_from_db(task_run_id) + + if not db_products: + logger.warning(f"DB에서 상품을 찾을 수 없음: task_run_id={task_run_id}") + return Response.error("상품 데이터를 찾을 수 없습니다.", "PRODUCTS_NOT_FOUND") + + # 2. 
최적 상품 선택 + selected_product = self._select_best_product(db_products) + + logger.success( + f"콘텐츠용 상품 선택 완료: name={selected_product['name']}, " + f"selection_reason={selected_product['selection_reason']}" + ) + + data = { + "task_run_id": task_run_id, + "selected_product": selected_product, + "total_available_products": len(db_products), + } + + return Response.ok(data, f"콘텐츠용 상품 선택 완료: {selected_product['name']}") + + except Exception as e: + logger.error(f"콘텐츠용 상품 선택 오류: {e}") + raise + + def _fetch_products_from_db(self, task_run_id: int) -> List[Dict]: + """DB에서 task_run_id에 해당하는 모든 상품 조회""" + try: + sql = """ + SELECT id, \ + name, \ + data_value, \ + created_at + FROM task_io_data + WHERE task_run_id = %s + AND io_type = 'OUTPUT' + AND data_type = 'JSON' + ORDER BY name \ + """ + + with self.db_manager.get_cursor() as cursor: + cursor.execute(sql, (task_run_id,)) + rows = cursor.fetchall() + + products = [] + for row in rows: + try: + # MariaDB에서 반환되는 row는 튜플 형태 + id, name, data_value_str, created_at = row + + # JSON 데이터 파싱 + data_value = json.loads(data_value_str) + + products.append({ + "id": id, + "name": name, + "data_value": data_value, + "created_at": created_at + }) + except json.JSONDecodeError as e: + logger.warning(f"JSON 파싱 실패: name={name}, error={e}") + continue + except Exception as e: + logger.warning(f"Row 처리 실패: {row}, error={e}") + continue + + logger.info(f"DB에서 {len(products)}개 상품 조회 완료") + return products + + except Exception as e: + logger.error(f"DB 상품 조회 오류: {e}") + return [] + + def _select_best_product(self, db_products: List[Dict]) -> Dict: + """ + 상품 선택 로직: + 1순위: S3 이미지 업로드가 성공하고 이미지가 많은 상품 + 2순위: 크롤링 성공한 첫 번째 상품 + 3순위: 첫 번째 상품 (fallback) + """ + try: + successful_products = [] + + # 1순위: S3 업로드 성공하고 이미지가 있는 상품들 + for product in db_products: + data_value = product.get("data_value", {}) + product_detail = data_value.get("product_detail", {}) + product_images = product_detail.get("product_images", []) + + # 크롤링 성공하고 이미지가 있는 상품 + if 
(data_value.get("status") == "success" and + product_detail and len(product_images) > 0): + successful_products.append({ + "product": product, + "image_count": len(product_images), + "title": product_detail.get("title", "Unknown") + }) + + if successful_products: + # 이미지 개수가 가장 많은 상품 선택 + best_product = max(successful_products, key=lambda x: x["image_count"]) + + logger.info( + f"1순위 선택: name={best_product['product']['name']}, " + f"images={best_product['image_count']}개" + ) + + return { + "selection_reason": "s3_upload_success_with_most_images", + "name": best_product["product"]["name"], + "product_info": best_product["product"]["data_value"], + "image_count": best_product["image_count"], + "title": best_product["title"] + } + + # 2순위: 크롤링 성공한 첫 번째 상품 (이미지 없어도) + for product in db_products: + data_value = product.get("data_value", {}) + if (data_value.get("status") == "success" and + data_value.get("product_detail")): + product_detail = data_value.get("product_detail", {}) + logger.info(f"2순위 선택: name={product['name']}") + + return { + "selection_reason": "first_crawl_success", + "name": product["name"], + "product_info": data_value, + "image_count": len(product_detail.get("product_images", [])), + "title": product_detail.get("title", "Unknown") + } + + # 3순위: 첫 번째 상품 (fallback) + if db_products: + first_product = db_products[0] + data_value = first_product.get("data_value", {}) + product_detail = data_value.get("product_detail", {}) + + logger.warning(f"3순위 fallback 선택: name={first_product['name']}") + + return { + "selection_reason": "fallback_first_product", + "name": first_product["name"], + "product_info": data_value, + "image_count": len(product_detail.get("product_images", [])), + "title": product_detail.get("title", "Unknown") + } + + # 모든 경우 실패 + logger.error("선택할 상품이 없습니다") + return { + "selection_reason": "no_products_available", + "name": None, + "product_info": None, + "image_count": 0, + "title": "Unknown" + } + + except Exception as e: + 
logger.error(f"상품 선택 로직 오류: {e}") + return { + "selection_reason": "selection_error", + "name": db_products[0]["name"] if db_products else None, + "product_info": db_products[0]["data_value"] if db_products else None, + "image_count": 0, + "title": "Unknown", + "error": str(e) + } \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/s3_upload_service.py b/apps/pre-processing-service/app/service/s3_upload_service.py index 48c84d35..7e52152c 100644 --- a/apps/pre-processing-service/app/service/s3_upload_service.py +++ b/apps/pre-processing-service/app/service/s3_upload_service.py @@ -1,41 +1,59 @@ import time +import json import asyncio import aiohttp +import ssl, certifi from typing import List, Dict +from datetime import datetime from loguru import logger from app.errors.CustomException import InvalidItemDataException from app.model.schemas import RequestS3Upload from app.utils.s3_upload_util import S3UploadUtil from app.utils.response import Response +from app.db.mariadb_manager import MariadbManager class S3UploadService: - """6단계: 크롤링된 상품 이미지들과 데이터를 S3에 업로드하는 서비스""" + """6단계: 크롤링된 상품 이미지들과 데이터를 S3에 업로드하고 DB에 저장하는 서비스""" def __init__(self): self.s3_util = S3UploadUtil() + self.db_manager = MariadbManager() async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: """ - 크롤링된 상품들의 이미지와 데이터를 S3에 업로드하는 비즈니스 로직 (6단계) + 크롤링된 상품들의 이미지와 데이터를 S3에 업로드하고 DB에 저장하는 비즈니스 로직 (6단계) """ - keyword = request.keyword # 키워드 추가 + keyword = request.keyword crawled_products = request.crawled_products - base_folder = ( - request.base_folder or "product" - ) # 🔸 기본값 변경: product-images → product + base_folder = request.base_folder or "product" + + # task_run_id는 자바 워크플로우에서 전달받음 + task_run_id = getattr(request, 'task_run_id', None) + if not task_run_id: + # 임시: task_run_id가 없으면 생성 + task_run_id = int(time.time() * 1000) + logger.warning(f"task_run_id가 없어서 임시로 생성: {task_run_id}") + else: + logger.info(f"자바 워크플로우에서 전달받은 task_run_id: 
{task_run_id}") logger.info( - f"S3 업로드 서비스 시작: keyword='{keyword}', {len(crawled_products)}개 상품" + f"S3 업로드 + DB 저장 서비스 시작: keyword='{keyword}', " + f"{len(crawled_products)}개 상품, task_run_id={task_run_id}" ) upload_results = [] total_success_images = 0 total_fail_images = 0 + db_save_results = [] try: # HTTP 세션을 사용한 이미지 다운로드 - async with aiohttp.ClientSession() as session: + + ssl_context = ssl.create_default_context(cafile=certifi.where()) + connector = aiohttp.TCPConnector(ssl=ssl_context) + + async with aiohttp.ClientSession(connector=connector) as session: # 각 상품별로 순차 업로드 for product_info in crawled_products: @@ -43,7 +61,7 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: product_detail = product_info.get("product_detail") logger.info( - f"상품 {product_index}/{len(crawled_products)} S3 업로드 시작" + f"상품 {product_index}/{len(crawled_products)} S3 업로드 + DB 저장 시작" ) # 크롤링 실패한 상품은 스킵 @@ -62,30 +80,43 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: "fail_count": 0, } ) + db_save_results.append({ + "product_index": product_index, + "db_status": "skipped", + "error": "크롤링 실패" + }) continue try: - # 상품 이미지 + 데이터 업로드 (키워드 전달 추가!) - # 🔸 전체 크롤링 데이터를 전달 (product_detail이 아닌 product_info 전체) + # 1. 상품 이미지 + 데이터 S3 업로드 upload_result = await self.s3_util.upload_single_product_images( session, product_info, product_index, keyword, - base_folder, # product_detail → product_info + base_folder, ) upload_results.append(upload_result) total_success_images += upload_result["success_count"] total_fail_images += upload_result["fail_count"] + # 2. 
DB에 상품 데이터 저장 + db_result = self._save_product_to_db( + task_run_id, + keyword, + product_index, + product_info + ) + db_save_results.append(db_result) + logger.success( - f"상품 {product_index} S3 업로드 완료: 성공 {upload_result['success_count']}개, " - f"실패 {upload_result['fail_count']}개" + f"상품 {product_index} S3 업로드 + DB 저장 완료: " + f"이미지 성공 {upload_result['success_count']}개, DB {db_result['db_status']}" ) except Exception as e: - logger.error(f"상품 {product_index} S3 업로드 오류: {e}") + logger.error(f"상품 {product_index} S3 업로드/DB 저장 오류: {e}") upload_results.append( { "product_index": product_index, @@ -97,122 +128,93 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: "fail_count": 0, } ) + db_save_results.append({ + "product_index": product_index, + "db_status": "error", + "error": str(e) + }) # 상품간 간격 (서버 부하 방지) if product_index < len(crawled_products): await asyncio.sleep(1) - # 🆕 임시: 콘텐츠 생성용 단일 상품 선택 로직 - selected_product_for_content = self._select_single_product_for_content( - crawled_products, upload_results - ) - logger.success( - f"S3 업로드 서비스 완료: 총 성공 이미지 {total_success_images}개, 총 실패 이미지 {total_fail_images}개" + f"S3 업로드 + DB 저장 서비스 완료: 총 성공 이미지 {total_success_images}개, " + f"총 실패 이미지 {total_fail_images}개" ) - # 기존 응답 데이터 구성 + # 응답 데이터 구성 data = { "upload_results": upload_results, + "db_save_results": db_save_results, + "task_run_id": task_run_id, "summary": { "total_products": len(crawled_products), "total_success_images": total_success_images, "total_fail_images": total_fail_images, + "db_success_count": len([r for r in db_save_results if r.get("db_status") == "success"]), + "db_fail_count": len([r for r in db_save_results if r.get("db_status") == "error"]), }, "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), - # 🆕 임시: 콘텐츠 생성용 단일 상품만 추가 (나중에 삭제 예정) - "selected_product_for_content": selected_product_for_content, } - message = f"S3 업로드 완료: {total_success_images}개 이미지 업로드 성공, 상품 데이터 JSON 파일 포함" + message = f"S3 업로드 + DB 저장 완료: 
{total_success_images}개 이미지 성공, {len([r for r in db_save_results if r.get('db_status') == 'success'])}개 상품 DB 저장 성공" return Response.ok(data, message) except Exception as e: - logger.error(f"S3 업로드 서비스 전체 오류: {e}") + logger.error(f"S3 업로드 + DB 저장 서비스 전체 오류: {e}") raise InvalidItemDataException() - def _select_single_product_for_content( - self, crawled_products: List[Dict], upload_results: List[Dict] + def _save_product_to_db( + self, + task_run_id: int, + keyword: str, + product_index: int, + product_info: Dict ) -> Dict: """ - 🆕 임시: 콘텐츠 생성을 위한 단일 상품 선택 로직 - 우선순위: 1) S3 업로드 성공한 상품 중 이미지 개수가 많은 것 - 2) 없다면 크롤링 성공한 첫 번째 상품 + 상품 데이터를 TASK_IO_DATA 테이블에 저장 (MariaDB) """ try: - # 1순위: S3 업로드 성공하고 이미지가 있는 상품들 - successful_uploads = [ - result - for result in upload_results - if result.get("status") == "completed" - and result.get("success_count", 0) > 0 - ] - - if successful_uploads: - # 이미지 개수가 가장 많은 상품 선택 - best_upload = max( - successful_uploads, key=lambda x: x.get("success_count", 0) - ) - selected_index = best_upload["product_index"] - - # 원본 크롤링 데이터에서 해당 상품 찾기 - for product_info in crawled_products: - if product_info.get("index") == selected_index: - logger.info( - f"콘텐츠 생성용 상품 선택: index={selected_index}, " - f"title='{product_info.get('product_detail', {}).get('title', 'Unknown')[:30]}', " - f"images={best_upload.get('success_count', 0)}개" - ) - return { - "selection_reason": "s3_upload_success_with_most_images", - "product_info": product_info, - "s3_upload_info": best_upload, - } - - # 2순위: 크롤링 성공한 첫 번째 상품 (S3 업로드 실패해도) - for product_info in crawled_products: - if product_info.get("status") == "success" and product_info.get( - "product_detail" - ): - - # 해당 상품의 S3 업로드 정보 찾기 - upload_info = None - for result in upload_results: - if result.get("product_index") == product_info.get("index"): - upload_info = result - break + # 상품명 생성 (산리오_01 형식) + product_name = f"{keyword}_{product_index:02d}" + + # data_value에 저장할 JSON 데이터 (전체 product_info) + data_value_json = 
json.dumps(product_info, ensure_ascii=False) + + # 현재 시간 + created_at = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + # MariaDB에 저장 + with self.db_manager.get_cursor() as cursor: + sql = """ + INSERT INTO task_io_data + (task_run_id, io_type, name, data_type, data_value, created_at) + VALUES (%s, %s, %s, %s, %s, %s) \ + """ + + cursor.execute(sql, ( + task_run_id, + "OUTPUT", + product_name, + "JSON", + data_value_json, + created_at + )) + + logger.success(f"상품 {product_index} DB 저장 성공: name={product_name}") - logger.info( - f"콘텐츠 생성용 상품 선택 (fallback): index={product_info.get('index')}, " - f"title='{product_info.get('product_detail', {}).get('title', 'Unknown')[:30]}'" - ) - return { - "selection_reason": "first_crawl_success", - "product_info": product_info, - "s3_upload_info": upload_info, - } - - # 3순위: 아무거나 (모든 상품이 실패한 경우) - if crawled_products: - logger.warning("모든 상품이 크롤링 실패 - 첫 번째 상품으로 fallback") - return { - "selection_reason": "fallback_first_product", - "product_info": crawled_products[0], - "s3_upload_info": upload_results[0] if upload_results else None, - } - - logger.error("선택할 상품이 없습니다") return { - "selection_reason": "no_products_available", - "product_info": None, - "s3_upload_info": None, + "product_index": product_index, + "product_name": product_name, + "db_status": "success", + "task_run_id": task_run_id, } except Exception as e: - logger.error(f"단일 상품 선택 오류: {e}") + logger.error(f"상품 {product_index} DB 저장 오류: {e}") return { - "selection_reason": "selection_error", - "product_info": crawled_products[0] if crawled_products else None, - "s3_upload_info": upload_results[0] if upload_results else None, - "error": str(e), - } + "product_index": product_index, + "db_status": "error", + "error": str(e) + } \ No newline at end of file diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java index 419a23a4..87c838a5 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java @@ -17,39 +17,33 @@ @RequiredArgsConstructor public class BlogRagBodyBuilder implements TaskBodyBuilder { - private final ObjectMapper objectMapper; - private static final String TASK_NAME = "블로그 RAG 생성 태스크"; - private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; - private static final String S3_UPLOAD_SOURCE_TASK = "S3 업로드 태스크"; // 변경: 크롤링 → S3 업로드 - - @Override - public boolean supports(String taskName) { - return TASK_NAME.equals(taskName); - } - - @Override - public ObjectNode build(Task task, Map workflowContext) { - ObjectNode body = objectMapper.createObjectNode(); - - // 키워드 정보 가져오기 - Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) - .map(node -> node.path("data").path("keyword")) - .ifPresent(keywordNode -> body.set("keyword", keywordNode)); - - // S3 업로드에서 선택된 상품 정보 가져오기 (변경된 부분) - Optional.ofNullable(workflowContext.get(S3_UPLOAD_SOURCE_TASK)) - .map( - node -> - node.path("data") - .path("selected_product_for_content") - .path("product_info") - .path("product_detail")) - .ifPresent(productNode -> body.set("product_info", productNode)); - - // 기본 콘텐츠 설정 - body.put("content_type", "review_blog"); - body.put("target_length", 1000); - - return body; - } -} + private final ObjectMapper objectMapper; + private static final String TASK_NAME = "블로그 RAG 생성 태스크"; + private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; + private static final String PRODUCT_SELECT_SOURCE_TASK = "상품 선택 태스크"; // 변경: S3 업로드 → 상품 선택 + + @Override + public boolean supports(String taskName) { + return TASK_NAME.equals(taskName); + } + + @Override + public ObjectNode build(Task task, Map 
workflowContext) { + ObjectNode body = objectMapper.createObjectNode(); + + // 키워드 정보 가져오기 + Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) + .map(node -> node.path("data").path("keyword")) + .ifPresent(keywordNode -> body.set("keyword", keywordNode)); + + Optional.ofNullable(workflowContext.get(PRODUCT_SELECT_SOURCE_TASK)) + .map(node -> node.path("data").path("selected_product")) + .ifPresent(productNode -> body.set("product_info", productNode)); + + // 기본 콘텐츠 설정 + body.put("content_type", "review_blog"); + body.put("target_length", 1000); + + return body; + } +} \ No newline at end of file diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java new file mode 100644 index 00000000..1d2ef5bf --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java @@ -0,0 +1,40 @@ +package site.icebang.domain.workflow.runner.fastapi.body; + +import java.util.Map; + +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.workflow.model.Task; + +@Component +@RequiredArgsConstructor +public class ProductSelectBodyBuilder implements TaskBodyBuilder { + + private final ObjectMapper objectMapper; + private static final String TASK_NAME = "상품 선택 태스크"; + + @Override + public boolean supports(String taskName) { + return TASK_NAME.equals(taskName); + } + + @Override + public ObjectNode build(Task task, Map workflowContext) { + ObjectNode body = objectMapper.createObjectNode(); + + // task_run_id는 현재 실행 중인 task의 run_id를 사용 + // 실제 구현에서는 Task 객체나 워크플로우 컨텍스트에서 가져와야 할 수 있습니다. 
+ body.put("task_run_id", task.getId()); // Task 객체에서 ID를 가져오는 방식으로 가정 + + // 기본 선택 기준 설정 (이미지 개수 우선) + body.put("selection_criteria", "image_count_priority"); + + return body; + } +} \ No newline at end of file diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java index bd0f823e..7b927dff 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java @@ -17,33 +17,35 @@ @RequiredArgsConstructor public class S3UploadBodyBuilder implements TaskBodyBuilder { - private final ObjectMapper objectMapper; - private static final String TASK_NAME = "S3 업로드 태스크"; - private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; - private static final String CRAWL_SOURCE_TASK = "상품 정보 크롤링 태스크"; - - @Override - public boolean supports(String taskName) { - return TASK_NAME.equals(taskName); - } - - @Override - public ObjectNode build(Task task, Map workflowContext) { - ObjectNode body = objectMapper.createObjectNode(); - - // 키워드 정보 가져오기 - Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) - .map(node -> node.path("data").path("keyword")) - .ifPresent(keywordNode -> body.set("keyword", keywordNode)); - - // 크롤링된 상품 데이터 가져오기 - Optional.ofNullable(workflowContext.get(CRAWL_SOURCE_TASK)) - .map(node -> node.path("data").path("crawled_products")) - .ifPresent(crawledProductsNode -> body.set("crawled_products", crawledProductsNode)); - - // 기본 폴더 설정 - body.put("base_folder", "product"); - - return body; - } -} + private final ObjectMapper objectMapper; + private static final String TASK_NAME = "S3 업로드 태스크"; + private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; + private static final String CRAWL_SOURCE_TASK = "상품 정보 크롤링 태스크"; 
+ + @Override + public boolean supports(String taskName) { + return TASK_NAME.equals(taskName); + } + + @Override + public ObjectNode build(Task task, Map workflowContext) { + ObjectNode body = objectMapper.createObjectNode(); + + // 키워드 정보 가져오기 (폴더명 생성용 - 스키마 주석 참조) + Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) + .map(node -> node.path("data").path("keyword")) + .filter(node -> !node.isMissingNode() && !node.asText().trim().isEmpty()) + .ifPresent(keywordNode -> body.set("keyword", keywordNode)); + + // 크롤링된 상품 데이터 가져오기 + Optional.ofNullable(workflowContext.get(CRAWL_SOURCE_TASK)) + .map(node -> node.path("data").path("crawled_products")) + .filter(node -> !node.isMissingNode()) + .ifPresent(crawledProductsNode -> body.set("crawled_products", crawledProductsNode)); + + // 기본 폴더 설정 (스키마의 기본값과 일치) + body.put("base_folder", "product"); + + return body; + } +} \ No newline at end of file From 9076e8a9d0081c34147f491a8d8b8b79e4cb8d69 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 25 Sep 2025 15:36:04 +0900 Subject: [PATCH 04/18] chore: poetry run black . 
& spotlessApply --- .../app/api/endpoints/product.py | 15 ++-- .../app/model/schemas.py | 16 ++++- .../app/service/product_selection_service.py | 58 +++++++++------ .../app/service/s3_upload_service.py | 72 ++++++++++--------- .../fastapi/body/BlogRagBodyBuilder.java | 60 ++++++++-------- .../body/ProductSelectBodyBuilder.java | 34 ++++----- .../fastapi/body/S3UploadBodyBuilder.java | 64 ++++++++--------- 7 files changed, 177 insertions(+), 142 deletions(-) diff --git a/apps/pre-processing-service/app/api/endpoints/product.py b/apps/pre-processing-service/app/api/endpoints/product.py index 0b9e888f..f5a91272 100644 --- a/apps/pre-processing-service/app/api/endpoints/product.py +++ b/apps/pre-processing-service/app/api/endpoints/product.py @@ -123,18 +123,25 @@ async def s3_upload(request: RequestS3Upload): except Exception as e: raise HTTPException(status_code=500, detail=str(e)) -@router.post("/select", response_model=ResponseProductSelect, summary="콘텐츠용 상품 선택") + +@router.post( + "/select", response_model=ResponseProductSelect, summary="콘텐츠용 상품 선택" +) def select_product(request: RequestProductSelect): # async 제거 """ S3 업로드 완료 후 콘텐츠 생성을 위한 최적 상품을 선택합니다. 
""" try: selection_service = ProductSelectionService() - response_data = selection_service.select_product_for_content(request) # await 제거 + response_data = selection_service.select_product_for_content( + request + ) # await 제거 if not response_data: - raise CustomException(500, "상품 선택에 실패했습니다.", "PRODUCT_SELECTION_FAILED") + raise CustomException( + 500, "상품 선택에 실패했습니다.", "PRODUCT_SELECTION_FAILED" + ) return response_data except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file + raise HTTPException(status_code=500, detail=str(e)) diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index a555dfe5..7487927b 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -235,10 +235,14 @@ class ResponseS3Upload(ResponseBase[S3UploadData]): pass + # ============== 상품 선택 (새로 추가) ============== + class RequestProductSelect(RequestBase): - task_run_id: int = Field(..., title="Task Run ID", description="상품을 선택할 task_run_id") + task_run_id: int = Field( + ..., title="Task Run ID", description="상품을 선택할 task_run_id" + ) selection_criteria: Optional[str] = Field( None, title="선택 기준", description="특별한 선택 기준 (기본: 이미지 개수 우선)" ) @@ -247,15 +251,21 @@ class RequestProductSelect(RequestBase): # 응답 데이터 모델 class ProductSelectData(BaseModel): task_run_id: int = Field(..., title="Task Run ID") - selected_product: Dict = Field(..., title="선택된 상품", description="콘텐츠 생성용으로 선택된 상품") - total_available_products: int = Field(..., title="전체 상품 수", description="선택 가능했던 전체 상품 개수") + selected_product: Dict = Field( + ..., title="선택된 상품", description="콘텐츠 생성용으로 선택된 상품" + ) + total_available_products: int = Field( + ..., title="전체 상품 수", description="선택 가능했던 전체 상품 개수" + ) # 최종 응답 모델 class ResponseProductSelect(ResponseBase[ProductSelectData]): """상품 선택 API 응답""" + pass + # ============== 블로그 콘텐츠 생성 ============== diff --git 
a/apps/pre-processing-service/app/service/product_selection_service.py b/apps/pre-processing-service/app/service/product_selection_service.py index 96093707..723bd940 100644 --- a/apps/pre-processing-service/app/service/product_selection_service.py +++ b/apps/pre-processing-service/app/service/product_selection_service.py @@ -25,7 +25,9 @@ def select_product_for_content(self, request: RequestProductSelect) -> dict: if not db_products: logger.warning(f"DB에서 상품을 찾을 수 없음: task_run_id={task_run_id}") - return Response.error("상품 데이터를 찾을 수 없습니다.", "PRODUCTS_NOT_FOUND") + return Response.error( + "상품 데이터를 찾을 수 없습니다.", "PRODUCTS_NOT_FOUND" + ) # 2. 최적 상품 선택 selected_product = self._select_best_product(db_products) @@ -41,7 +43,9 @@ def select_product_for_content(self, request: RequestProductSelect) -> dict: "total_available_products": len(db_products), } - return Response.ok(data, f"콘텐츠용 상품 선택 완료: {selected_product['name']}") + return Response.ok( + data, f"콘텐츠용 상품 선택 완료: {selected_product['name']}" + ) except Exception as e: logger.error(f"콘텐츠용 상품 선택 오류: {e}") @@ -75,12 +79,14 @@ def _fetch_products_from_db(self, task_run_id: int) -> List[Dict]: # JSON 데이터 파싱 data_value = json.loads(data_value_str) - products.append({ - "id": id, - "name": name, - "data_value": data_value, - "created_at": created_at - }) + products.append( + { + "id": id, + "name": name, + "data_value": data_value, + "created_at": created_at, + } + ) except json.JSONDecodeError as e: logger.warning(f"JSON 파싱 실패: name={name}, error={e}") continue @@ -112,13 +118,18 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: product_images = product_detail.get("product_images", []) # 크롤링 성공하고 이미지가 있는 상품 - if (data_value.get("status") == "success" and - product_detail and len(product_images) > 0): - successful_products.append({ - "product": product, - "image_count": len(product_images), - "title": product_detail.get("title", "Unknown") - }) + if ( + data_value.get("status") == "success" + and 
product_detail + and len(product_images) > 0 + ): + successful_products.append( + { + "product": product, + "image_count": len(product_images), + "title": product_detail.get("title", "Unknown"), + } + ) if successful_products: # 이미지 개수가 가장 많은 상품 선택 @@ -134,14 +145,15 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: "name": best_product["product"]["name"], "product_info": best_product["product"]["data_value"], "image_count": best_product["image_count"], - "title": best_product["title"] + "title": best_product["title"], } # 2순위: 크롤링 성공한 첫 번째 상품 (이미지 없어도) for product in db_products: data_value = product.get("data_value", {}) - if (data_value.get("status") == "success" and - data_value.get("product_detail")): + if data_value.get("status") == "success" and data_value.get( + "product_detail" + ): product_detail = data_value.get("product_detail", {}) logger.info(f"2순위 선택: name={product['name']}") @@ -150,7 +162,7 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: "name": product["name"], "product_info": data_value, "image_count": len(product_detail.get("product_images", [])), - "title": product_detail.get("title", "Unknown") + "title": product_detail.get("title", "Unknown"), } # 3순위: 첫 번째 상품 (fallback) @@ -166,7 +178,7 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: "name": first_product["name"], "product_info": data_value, "image_count": len(product_detail.get("product_images", [])), - "title": product_detail.get("title", "Unknown") + "title": product_detail.get("title", "Unknown"), } # 모든 경우 실패 @@ -176,7 +188,7 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: "name": None, "product_info": None, "image_count": 0, - "title": "Unknown" + "title": "Unknown", } except Exception as e: @@ -187,5 +199,5 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: "product_info": db_products[0]["data_value"] if db_products else None, "image_count": 0, "title": "Unknown", - "error": str(e) - } \ No 
newline at end of file + "error": str(e), + } diff --git a/apps/pre-processing-service/app/service/s3_upload_service.py b/apps/pre-processing-service/app/service/s3_upload_service.py index 7e52152c..c804a201 100644 --- a/apps/pre-processing-service/app/service/s3_upload_service.py +++ b/apps/pre-processing-service/app/service/s3_upload_service.py @@ -29,7 +29,7 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: base_folder = request.base_folder or "product" # task_run_id는 자바 워크플로우에서 전달받음 - task_run_id = getattr(request, 'task_run_id', None) + task_run_id = getattr(request, "task_run_id", None) if not task_run_id: # 임시: task_run_id가 없으면 생성 task_run_id = int(time.time() * 1000) @@ -80,11 +80,13 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: "fail_count": 0, } ) - db_save_results.append({ - "product_index": product_index, - "db_status": "skipped", - "error": "크롤링 실패" - }) + db_save_results.append( + { + "product_index": product_index, + "db_status": "skipped", + "error": "크롤링 실패", + } + ) continue try: @@ -103,10 +105,7 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: # 2. 
DB에 상품 데이터 저장 db_result = self._save_product_to_db( - task_run_id, - keyword, - product_index, - product_info + task_run_id, keyword, product_index, product_info ) db_save_results.append(db_result) @@ -116,7 +115,9 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: ) except Exception as e: - logger.error(f"상품 {product_index} S3 업로드/DB 저장 오류: {e}") + logger.error( + f"상품 {product_index} S3 업로드/DB 저장 오류: {e}" + ) upload_results.append( { "product_index": product_index, @@ -128,11 +129,13 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: "fail_count": 0, } ) - db_save_results.append({ - "product_index": product_index, - "db_status": "error", - "error": str(e) - }) + db_save_results.append( + { + "product_index": product_index, + "db_status": "error", + "error": str(e), + } + ) # 상품간 간격 (서버 부하 방지) if product_index < len(crawled_products): @@ -152,8 +155,12 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: "total_products": len(crawled_products), "total_success_images": total_success_images, "total_fail_images": total_fail_images, - "db_success_count": len([r for r in db_save_results if r.get("db_status") == "success"]), - "db_fail_count": len([r for r in db_save_results if r.get("db_status") == "error"]), + "db_success_count": len( + [r for r in db_save_results if r.get("db_status") == "success"] + ), + "db_fail_count": len( + [r for r in db_save_results if r.get("db_status") == "error"] + ), }, "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), } @@ -166,11 +173,7 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: raise InvalidItemDataException() def _save_product_to_db( - self, - task_run_id: int, - keyword: str, - product_index: int, - product_info: Dict + self, task_run_id: int, keyword: str, product_index: int, product_info: Dict ) -> Dict: """ 상품 데이터를 TASK_IO_DATA 테이블에 저장 (MariaDB) @@ -193,14 +196,17 @@ def _save_product_to_db( 
VALUES (%s, %s, %s, %s, %s, %s) \ """ - cursor.execute(sql, ( - task_run_id, - "OUTPUT", - product_name, - "JSON", - data_value_json, - created_at - )) + cursor.execute( + sql, + ( + task_run_id, + "OUTPUT", + product_name, + "JSON", + data_value_json, + created_at, + ), + ) logger.success(f"상품 {product_index} DB 저장 성공: name={product_name}") @@ -216,5 +222,5 @@ def _save_product_to_db( return { "product_index": product_index, "db_status": "error", - "error": str(e) - } \ No newline at end of file + "error": str(e), + } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java index 87c838a5..8a8008ed 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/BlogRagBodyBuilder.java @@ -17,33 +17,33 @@ @RequiredArgsConstructor public class BlogRagBodyBuilder implements TaskBodyBuilder { - private final ObjectMapper objectMapper; - private static final String TASK_NAME = "블로그 RAG 생성 태스크"; - private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; - private static final String PRODUCT_SELECT_SOURCE_TASK = "상품 선택 태스크"; // 변경: S3 업로드 → 상품 선택 - - @Override - public boolean supports(String taskName) { - return TASK_NAME.equals(taskName); - } - - @Override - public ObjectNode build(Task task, Map workflowContext) { - ObjectNode body = objectMapper.createObjectNode(); - - // 키워드 정보 가져오기 - Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) - .map(node -> node.path("data").path("keyword")) - .ifPresent(keywordNode -> body.set("keyword", keywordNode)); - - Optional.ofNullable(workflowContext.get(PRODUCT_SELECT_SOURCE_TASK)) - .map(node -> node.path("data").path("selected_product")) - .ifPresent(productNode -> body.set("product_info", productNode)); - - 
// 기본 콘텐츠 설정 - body.put("content_type", "review_blog"); - body.put("target_length", 1000); - - return body; - } -} \ No newline at end of file + private final ObjectMapper objectMapper; + private static final String TASK_NAME = "블로그 RAG 생성 태스크"; + private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; + private static final String PRODUCT_SELECT_SOURCE_TASK = "상품 선택 태스크"; // 변경: S3 업로드 → 상품 선택 + + @Override + public boolean supports(String taskName) { + return TASK_NAME.equals(taskName); + } + + @Override + public ObjectNode build(Task task, Map workflowContext) { + ObjectNode body = objectMapper.createObjectNode(); + + // 키워드 정보 가져오기 + Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) + .map(node -> node.path("data").path("keyword")) + .ifPresent(keywordNode -> body.set("keyword", keywordNode)); + + Optional.ofNullable(workflowContext.get(PRODUCT_SELECT_SOURCE_TASK)) + .map(node -> node.path("data").path("selected_product")) + .ifPresent(productNode -> body.set("product_info", productNode)); + + // 기본 콘텐츠 설정 + body.put("content_type", "review_blog"); + body.put("target_length", 1000); + + return body; + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java index 1d2ef5bf..17934012 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java @@ -16,25 +16,25 @@ @RequiredArgsConstructor public class ProductSelectBodyBuilder implements TaskBodyBuilder { - private final ObjectMapper objectMapper; - private static final String TASK_NAME = "상품 선택 태스크"; + private final ObjectMapper objectMapper; + private static final String TASK_NAME = "상품 선택 태스크"; - @Override - public boolean 
supports(String taskName) { - return TASK_NAME.equals(taskName); - } + @Override + public boolean supports(String taskName) { + return TASK_NAME.equals(taskName); + } - @Override - public ObjectNode build(Task task, Map workflowContext) { - ObjectNode body = objectMapper.createObjectNode(); + @Override + public ObjectNode build(Task task, Map workflowContext) { + ObjectNode body = objectMapper.createObjectNode(); - // task_run_id는 현재 실행 중인 task의 run_id를 사용 - // 실제 구현에서는 Task 객체나 워크플로우 컨텍스트에서 가져와야 할 수 있습니다. - body.put("task_run_id", task.getId()); // Task 객체에서 ID를 가져오는 방식으로 가정 + // task_run_id는 현재 실행 중인 task의 run_id를 사용 + // 실제 구현에서는 Task 객체나 워크플로우 컨텍스트에서 가져와야 할 수 있습니다. + body.put("task_run_id", task.getId()); // Task 객체에서 ID를 가져오는 방식으로 가정 - // 기본 선택 기준 설정 (이미지 개수 우선) - body.put("selection_criteria", "image_count_priority"); + // 기본 선택 기준 설정 (이미지 개수 우선) + body.put("selection_criteria", "image_count_priority"); - return body; - } -} \ No newline at end of file + return body; + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java index 7b927dff..7548452a 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/S3UploadBodyBuilder.java @@ -17,35 +17,35 @@ @RequiredArgsConstructor public class S3UploadBodyBuilder implements TaskBodyBuilder { - private final ObjectMapper objectMapper; - private static final String TASK_NAME = "S3 업로드 태스크"; - private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; - private static final String CRAWL_SOURCE_TASK = "상품 정보 크롤링 태스크"; - - @Override - public boolean supports(String taskName) { - return TASK_NAME.equals(taskName); - } - - @Override - public ObjectNode build(Task task, Map workflowContext) { - 
ObjectNode body = objectMapper.createObjectNode(); - - // 키워드 정보 가져오기 (폴더명 생성용 - 스키마 주석 참조) - Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) - .map(node -> node.path("data").path("keyword")) - .filter(node -> !node.isMissingNode() && !node.asText().trim().isEmpty()) - .ifPresent(keywordNode -> body.set("keyword", keywordNode)); - - // 크롤링된 상품 데이터 가져오기 - Optional.ofNullable(workflowContext.get(CRAWL_SOURCE_TASK)) - .map(node -> node.path("data").path("crawled_products")) - .filter(node -> !node.isMissingNode()) - .ifPresent(crawledProductsNode -> body.set("crawled_products", crawledProductsNode)); - - // 기본 폴더 설정 (스키마의 기본값과 일치) - body.put("base_folder", "product"); - - return body; - } -} \ No newline at end of file + private final ObjectMapper objectMapper; + private static final String TASK_NAME = "S3 업로드 태스크"; + private static final String KEYWORD_SOURCE_TASK = "키워드 검색 태스크"; + private static final String CRAWL_SOURCE_TASK = "상품 정보 크롤링 태스크"; + + @Override + public boolean supports(String taskName) { + return TASK_NAME.equals(taskName); + } + + @Override + public ObjectNode build(Task task, Map workflowContext) { + ObjectNode body = objectMapper.createObjectNode(); + + // 키워드 정보 가져오기 (폴더명 생성용 - 스키마 주석 참조) + Optional.ofNullable(workflowContext.get(KEYWORD_SOURCE_TASK)) + .map(node -> node.path("data").path("keyword")) + .filter(node -> !node.isMissingNode() && !node.asText().trim().isEmpty()) + .ifPresent(keywordNode -> body.set("keyword", keywordNode)); + + // 크롤링된 상품 데이터 가져오기 + Optional.ofNullable(workflowContext.get(CRAWL_SOURCE_TASK)) + .map(node -> node.path("data").path("crawled_products")) + .filter(node -> !node.isMissingNode()) + .ifPresent(crawledProductsNode -> body.set("crawled_products", crawledProductsNode)); + + // 기본 폴더 설정 (스키마의 기본값과 일치) + body.put("base_folder", "product"); + + return body; + } +} From ac8c7846965ce0dda2d592fecb2b9b31b3f70ce0 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 25 Sep 2025 16:04:36 +0900 Subject: [PATCH 
05/18] =?UTF-8?q?refactor:=20=ED=81=AC=EB=A1=A4=EB=A7=81?= =?UTF-8?q?=20=EC=84=9C=EB=B9=84=EC=8A=A4=20=EC=88=9C=EC=B0=A8=EC=A0=81=20?= =?UTF-8?q?=ED=81=AC=EB=A1=A4=EB=A7=81=EC=97=90=EC=84=9C=20=EB=B9=84?= =?UTF-8?q?=EB=8F=99=EA=B8=B0=20=ED=81=AC=EB=A1=A4=EB=A7=81=EC=9C=BC?= =?UTF-8?q?=EB=A1=9C=20=EB=B3=80=EA=B2=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/service/crawl_service.py | 150 ++++++++++-------- .../app/service/crawlers/detail_crawler.py | 77 +++++---- 2 files changed, 132 insertions(+), 95 deletions(-) diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index e8785f64..978226b9 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -11,10 +11,10 @@ class CrawlService: def __init__(self): pass - async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: + async def crawl_product_detail(self, request: RequestSadaguCrawl, max_concurrent: int = 5) -> dict: """ 선택된 상품들의 상세 정보를 크롤링하는 비즈니스 로직입니다. (5단계) - 여러 상품 URL을 입력받아 순차적으로 상세 정보를 크롤링하여 딕셔너리로 반환합니다. + 여러 상품 URL을 입력받아 비동기로 상세 정보를 크롤링하여 딕셔너리로 반환합니다. 
""" product_urls = [str(url) for url in request.product_urls] @@ -25,70 +25,40 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: fail_count = 0 try: - # 각 상품을 순차적으로 크롤링 (안정성 확보) + # 세마포어로 동시 실행 수 제한 + semaphore = asyncio.Semaphore(max_concurrent) + + # 모든 크롤링 태스크를 동시에 실행 + tasks = [] for i, product_url in enumerate(product_urls, 1): - logger.info(f"상품 {i}/{len(product_urls)} 크롤링 시작: {product_url}") - - crawler = DetailCrawler(use_selenium=True) - - try: - # 상세 정보 크롤링 실행 - product_detail = await crawler.crawl_detail(product_url) - - if product_detail: - product_title = product_detail.get("title", "Unknown")[:50] - logger.success( - f"상품 {i} 크롤링 성공: title='{product_title}', price={product_detail.get('price', 0)}" - ) - - # 성공한 상품 추가 - crawled_products.append( - { - "index": i, - "url": product_url, - "product_detail": product_detail, - "status": "success", - "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), - } - ) + task = self._crawl_single_with_semaphore(semaphore, i, product_url, len(product_urls)) + tasks.append(task) + + # 모든 태스크 동시 실행 및 결과 수집 + results = await asyncio.gather(*tasks, return_exceptions=True) + + # 결과 정리 + for result in results: + if isinstance(result, Exception): + logger.error(f"크롤링 태스크 오류: {result}") + crawled_products.append({ + "index": len(crawled_products) + 1, + "url": "unknown", + "product_detail": None, + "status": "failed", + "error": str(result), + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), + }) + fail_count += 1 + else: + crawled_products.append(result) + if result["status"] == "success": success_count += 1 else: - logger.error(f"상품 {i} 크롤링 실패: 상세 정보 없음") - crawled_products.append( - { - "index": i, - "url": product_url, - "product_detail": None, - "status": "failed", - "error": "상세 정보 없음", - "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), - } - ) fail_count += 1 - except Exception as e: - logger.error( - f"상품 {i} 크롤링 오류: url={product_url}, error='{e}'" - ) - crawled_products.append( - { - 
"index": i, - "url": product_url, - "product_detail": None, - "status": "failed", - "error": str(e), - "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), - } - ) - fail_count += 1 - - finally: - # 각 크롤러 개별 정리 - await crawler.close() - - # 상품간 간격 (서버 부하 방지) - if i < len(product_urls): - await asyncio.sleep(1) + # 인덱스 순으로 정렬 + crawled_products.sort(key=lambda x: x["index"]) logger.success( f"전체 크롤링 완료: 총 {len(product_urls)}개, 성공 {success_count}개, 실패 {fail_count}개" @@ -111,10 +81,62 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl) -> dict: logger.error(f"배치 크롤링 서비스 오류: error='{e}'") raise InvalidItemDataException() - # 기존 단일 크롤링 메서드도 유지 (하위 호환성) + async def _crawl_single_with_semaphore(self, semaphore: asyncio.Semaphore, index: int, product_url: str, + total_count: int) -> dict: + """ + 세마포어를 사용한 단일 상품 크롤링 + """ + async with semaphore: + logger.info(f"상품 {index}/{total_count} 크롤링 시작: {product_url}") + + crawler = DetailCrawler(use_selenium=True) + + try: + # 상세 정보 크롤링 실행 + product_detail = await crawler.crawl_detail(product_url) + + if product_detail: + product_title = product_detail.get("title", "Unknown")[:50] + logger.success( + f"상품 {index} 크롤링 성공: title='{product_title}', price={product_detail.get('price', 0)}" + ) + + return { + "index": index, + "url": product_url, + "product_detail": product_detail, + "status": "success", + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), + } + else: + logger.error(f"상품 {index} 크롤링 실패: 상세 정보 없음") + return { + "index": index, + "url": product_url, + "product_detail": None, + "status": "failed", + "error": "상세 정보 없음", + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), + } + + except Exception as e: + logger.error(f"상품 {index} 크롤링 오류: url={product_url}, error='{e}'") + return { + "index": index, + "url": product_url, + "product_detail": None, + "status": "failed", + "error": str(e), + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), + } + + finally: + # 각 크롤러 개별 정리 + await crawler.close() + async def 
crawl_single_product_detail(self, product_url: str) -> dict: """ - 단일 상품 크롤링 (하위 호환성용) + 단일 상품 크롤링 """ crawler = DetailCrawler(use_selenium=True) @@ -142,4 +164,4 @@ async def crawl_single_product_detail(self, product_url: str) -> dict: logger.error(f"단일 크롤링 오류: url={product_url}, error='{e}'") raise InvalidItemDataException() finally: - await crawler.close() + await crawler.close() \ No newline at end of file diff --git a/apps/pre-processing-service/app/service/crawlers/detail_crawler.py b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py index f01ed53a..097f7e0d 100644 --- a/apps/pre-processing-service/app/service/crawlers/detail_crawler.py +++ b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py @@ -1,5 +1,6 @@ import time import re +import asyncio from bs4 import BeautifulSoup from .search_crawler import SearchCrawler from loguru import logger @@ -13,28 +14,17 @@ async def crawl_detail(self, product_url: str) -> dict: try: logger.info(f"상품 상세 크롤링 시작: url='{product_url}'") - # HTML 가져오기 + # HTML 가져오기 (Selenium 부분을 별도 스레드에서 실행) soup = ( await self._get_soup_selenium(product_url) if self.use_selenium else await self._get_soup_httpx(product_url) ) - # 기본 정보 추출 - title = self._extract_title(soup) - price = self._extract_price(soup) - rating = self._extract_rating(soup) - options = self._extract_options(soup) - material_info = self._extract_material_info(soup) - - # 이미지 정보 추출 (항상 실행) - logger.info("이미지 정보 추출 중...") - page_images = self._extract_images(soup) - option_images = [ - opt["image_url"] for opt in options if opt.get("image_url") - ] - # 중복 제거 후 합치기 - all_images = list(set(page_images + option_images)) + # 기본 정보 추출 (CPU 집약적 작업을 별도 스레드에서 실행) + extraction_tasks = await asyncio.to_thread(self._extract_all_data, soup, product_url) + + title, price, rating, options, material_info, all_images = extraction_tasks product_data = { "url": product_url, @@ -58,20 +48,25 @@ async def crawl_detail(self, product_url: str) -> dict: raise 
Exception(f"크롤링 실패: {str(e)}") async def _get_soup_selenium(self, product_url: str) -> BeautifulSoup: - """Selenium으로 HTML 가져오기""" - try: - logger.debug(f"Selenium HTML 로딩 시작: url='{product_url}'") - self.driver.get(product_url) - self.wait.until( - lambda driver: driver.execute_script("return document.readyState") - == "complete" - ) - time.sleep(2) - logger.debug("Selenium HTML 로딩 완료") - return BeautifulSoup(self.driver.page_source, "html.parser") - except Exception as e: - logger.error(f"Selenium HTML 로딩 실패: url='{product_url}', error='{e}'") - raise Exception(f"Selenium HTML 로딩 실패: {e}") + """Selenium으로 HTML 가져오기 (별도 스레드에서 실행)""" + + def _selenium_sync(url): + try: + logger.debug(f"Selenium HTML 로딩 시작: url='{url}'") + self.driver.get(url) + self.wait.until( + lambda driver: driver.execute_script("return document.readyState") + == "complete" + ) + time.sleep(2) + logger.debug("Selenium HTML 로딩 완료") + return BeautifulSoup(self.driver.page_source, "html.parser") + except Exception as e: + logger.error(f"Selenium HTML 로딩 실패: url='{url}', error='{e}'") + raise Exception(f"Selenium HTML 로딩 실패: {e}") + + # Selenium 동기 코드를 별도 스레드에서 실행 + return await asyncio.to_thread(_selenium_sync, product_url) async def _get_soup_httpx(self, product_url: str) -> BeautifulSoup: """httpx로 HTML 가져오기""" @@ -85,6 +80,26 @@ async def _get_soup_httpx(self, product_url: str) -> BeautifulSoup: logger.error(f"httpx HTML 요청 실패: url='{product_url}', error='{e}'") raise Exception(f"HTTP 요청 실패: {e}") + def _extract_all_data(self, soup: BeautifulSoup, product_url: str) -> tuple: + """모든 데이터 추출을 한 번에 처리 (동기 함수)""" + # 기본 정보 추출 + title = self._extract_title(soup) + price = self._extract_price(soup) + rating = self._extract_rating(soup) + options = self._extract_options(soup) + material_info = self._extract_material_info(soup) + + # 이미지 정보 추출 + logger.info("이미지 정보 추출 중...") + page_images = self._extract_images(soup) + option_images = [ + opt["image_url"] for opt in options if opt.get("image_url") + ] 
+ # 중복 제거 후 합치기 + all_images = list(set(page_images + option_images)) + + return title, price, rating, options, material_info, all_images + def _extract_title(self, soup: BeautifulSoup) -> str: title_element = soup.find("h1", {"id": "kakaotitle"}) title = title_element.get_text(strip=True) if title_element else "제목 없음" @@ -184,4 +199,4 @@ def _extract_images(self, soup: BeautifulSoup) -> list[str]: src = self.base_url + src images.append(src) logger.info(f"총 {len(images)}개 이미지 URL 추출 완료") - return images + return images \ No newline at end of file From 426b4cfd7254f151a5f2d9cdfa52cbce45dabca0 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 25 Sep 2025 17:39:07 +0900 Subject: [PATCH 06/18] =?UTF-8?q?refactor:=20RDB=EC=99=80=20selection=20ta?= =?UTF-8?q?sk=20=EC=8B=A4=ED=96=89=EC=8B=9C=20task=5Frun=5Fid=EA=B0=80=20m?= =?UTF-8?q?ismatch=20=EB=90=98=EB=8A=94=20=EB=AC=B8=EC=A0=9C=20=ED=95=B4?= =?UTF-8?q?=EA=B2=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../app/model/schemas.py | 1 + .../app/service/crawl_service.py | 2 + .../app/service/product_selection_service.py | 52 ++-- .../app/service/s3_upload_service.py | 274 +++++++++++------- .../body/ProductSelectBodyBuilder.java | 5 - .../service/WorkflowExecutionService.java | 14 + .../main/resources/sql/03-insert-workflow.sql | 23 +- 7 files changed, 219 insertions(+), 152 deletions(-) diff --git a/apps/pre-processing-service/app/model/schemas.py b/apps/pre-processing-service/app/model/schemas.py index 7487927b..4001b705 100644 --- a/apps/pre-processing-service/app/model/schemas.py +++ b/apps/pre-processing-service/app/model/schemas.py @@ -165,6 +165,7 @@ class ResponseSadaguCrawl(ResponseBase[SadaguCrawlData]): class RequestS3Upload(RequestBase): + task_run_id: int = Field(..., title="Task Run ID", description="워크플로우 실행 ID") keyword: str = Field( ..., title="검색 키워드", description="폴더명 생성용 키워드" ) # 추가 diff --git 
a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 978226b9..768dd3dc 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -5,6 +5,8 @@ from app.model.schemas import RequestSadaguCrawl from loguru import logger from app.utils.response import Response +import os +os.environ["TOKENIZERS_PARALLELISM"] = "false" class CrawlService: diff --git a/apps/pre-processing-service/app/service/product_selection_service.py b/apps/pre-processing-service/app/service/product_selection_service.py index 723bd940..41b1c8dc 100644 --- a/apps/pre-processing-service/app/service/product_selection_service.py +++ b/apps/pre-processing-service/app/service/product_selection_service.py @@ -1,7 +1,7 @@ import json from typing import List, Dict from loguru import logger -from app.model.schemas import RequestProductSelect +from app.model.schemas import RequestProductSelect, ProductSelectData from app.utils.response import Response from app.db.mariadb_manager import MariadbManager @@ -25,8 +25,15 @@ def select_product_for_content(self, request: RequestProductSelect) -> dict: if not db_products: logger.warning(f"DB에서 상품을 찾을 수 없음: task_run_id={task_run_id}") + # Pydantic Generic Response 구조에 맞춰 data에 항상 객체를 넣음 + data = ProductSelectData( + task_run_id=task_run_id, + selected_product={}, # 상품 없음 + total_available_products=0, + ) return Response.error( - "상품 데이터를 찾을 수 없습니다.", "PRODUCTS_NOT_FOUND" + message="상품 데이터를 찾을 수 없습니다.", + data=data.dict(), ) # 2. 
최적 상품 선택 @@ -37,14 +44,16 @@ def select_product_for_content(self, request: RequestProductSelect) -> dict: f"selection_reason={selected_product['selection_reason']}" ) - data = { - "task_run_id": task_run_id, - "selected_product": selected_product, - "total_available_products": len(db_products), - } + # 응답용 데이터 구성 + data = ProductSelectData( + task_run_id=task_run_id, + selected_product=selected_product, + total_available_products=len(db_products), + ) return Response.ok( - data, f"콘텐츠용 상품 선택 완료: {selected_product['name']}" + data=data.dict(), + message=f"콘텐츠용 상품 선택 완료: {selected_product['name']}" ) except Exception as e: @@ -63,7 +72,7 @@ def _fetch_products_from_db(self, task_run_id: int) -> List[Dict]: WHERE task_run_id = %s AND io_type = 'OUTPUT' AND data_type = 'JSON' - ORDER BY name \ + ORDER BY name """ with self.db_manager.get_cursor() as cursor: @@ -73,12 +82,8 @@ def _fetch_products_from_db(self, task_run_id: int) -> List[Dict]: products = [] for row in rows: try: - # MariaDB에서 반환되는 row는 튜플 형태 id, name, data_value_str, created_at = row - - # JSON 데이터 파싱 data_value = json.loads(data_value_str) - products.append( { "id": id, @@ -111,18 +116,13 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: try: successful_products = [] - # 1순위: S3 업로드 성공하고 이미지가 있는 상품들 + # 1순위: S3 업로드 성공하고 이미지가 있는 상품 for product in db_products: data_value = product.get("data_value", {}) product_detail = data_value.get("product_detail", {}) product_images = product_detail.get("product_images", []) - # 크롤링 성공하고 이미지가 있는 상품 - if ( - data_value.get("status") == "success" - and product_detail - and len(product_images) > 0 - ): + if data_value.get("status") == "success" and product_detail and len(product_images) > 0: successful_products.append( { "product": product, @@ -132,14 +132,11 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: ) if successful_products: - # 이미지 개수가 가장 많은 상품 선택 best_product = max(successful_products, key=lambda x: x["image_count"]) - 
logger.info( f"1순위 선택: name={best_product['product']['name']}, " f"images={best_product['image_count']}개" ) - return { "selection_reason": "s3_upload_success_with_most_images", "name": best_product["product"]["name"], @@ -148,15 +145,12 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: "title": best_product["title"], } - # 2순위: 크롤링 성공한 첫 번째 상품 (이미지 없어도) + # 2순위: 크롤링 성공한 첫 번째 상품 for product in db_products: data_value = product.get("data_value", {}) - if data_value.get("status") == "success" and data_value.get( - "product_detail" - ): + if data_value.get("status") == "success" and data_value.get("product_detail"): product_detail = data_value.get("product_detail", {}) logger.info(f"2순위 선택: name={product['name']}") - return { "selection_reason": "first_crawl_success", "name": product["name"], @@ -170,9 +164,7 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: first_product = db_products[0] data_value = first_product.get("data_value", {}) product_detail = data_value.get("product_detail", {}) - logger.warning(f"3순위 fallback 선택: name={first_product['name']}") - return { "selection_reason": "fallback_first_product", "name": first_product["name"], diff --git a/apps/pre-processing-service/app/service/s3_upload_service.py b/apps/pre-processing-service/app/service/s3_upload_service.py index c804a201..fd422f42 100644 --- a/apps/pre-processing-service/app/service/s3_upload_service.py +++ b/apps/pre-processing-service/app/service/s3_upload_service.py @@ -20,7 +20,7 @@ def __init__(self): self.s3_util = S3UploadUtil() self.db_manager = MariadbManager() - async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: + async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_concurrent: int = 5) -> dict: """ 크롤링된 상품들의 이미지와 데이터를 S3에 업로드하고 DB에 저장하는 비즈니스 로직 (6단계) """ @@ -31,11 +31,24 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: # task_run_id는 자바 워크플로우에서 전달받음 task_run_id = 
getattr(request, "task_run_id", None) if not task_run_id: - # 임시: task_run_id가 없으면 생성 - task_run_id = int(time.time() * 1000) - logger.warning(f"task_run_id가 없어서 임시로 생성: {task_run_id}") - else: - logger.info(f"자바 워크플로우에서 전달받은 task_run_id: {task_run_id}") + # 자바에서 TaskRun을 만들었으므로 없으면 에러 + logger.error("task_run_id가 없어서 파이썬에서 실행 불가") + return Response.error( + data={ + "upload_results": [], + "db_save_results": [], + "task_run_id": None, + "summary": { + "total_products": 0, + "total_success_images": 0, + "total_fail_images": 0, + "db_success_count": 0, + "db_fail_count": 0, + }, + "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), + }, + message="task_run_id is required from Java workflow" + ) logger.info( f"S3 업로드 + DB 저장 서비스 시작: keyword='{keyword}', " @@ -49,131 +62,174 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload) -> dict: try: # HTTP 세션을 사용한 이미지 다운로드 - ssl_context = ssl.create_default_context(cafile=certifi.where()) connector = aiohttp.TCPConnector(ssl=ssl_context) async with aiohttp.ClientSession(connector=connector) as session: + # 세마포어로 동시 실행 수 제한 + semaphore = asyncio.Semaphore(max_concurrent) - # 각 상품별로 순차 업로드 + # 모든 업로드 태스크를 동시에 실행 + tasks = [] for product_info in crawled_products: - product_index = product_info.get("index", 0) - product_detail = product_info.get("product_detail") - - logger.info( - f"상품 {product_index}/{len(crawled_products)} S3 업로드 + DB 저장 시작" + task = self._upload_single_product_with_semaphore( + semaphore, session, product_info, keyword, base_folder, task_run_id ) + tasks.append(task) - # 크롤링 실패한 상품은 스킵 - if not product_detail or product_info.get("status") != "success": - logger.warning( - f"상품 {product_index}: 크롤링 실패로 인한 업로드 스킵" - ) - upload_results.append( - { - "product_index": product_index, - "product_title": "Unknown", - "status": "skipped", - "folder_s3_url": None, - "uploaded_images": [], - "success_count": 0, - "fail_count": 0, - } - ) - db_save_results.append( - { - "product_index": product_index, 
- "db_status": "skipped", - "error": "크롤링 실패", - } - ) - continue - - try: - # 1. 상품 이미지 + 데이터 S3 업로드 - upload_result = await self.s3_util.upload_single_product_images( - session, - product_info, - product_index, - keyword, - base_folder, - ) + # 모든 태스크 동시 실행 및 결과 수집 + results = await asyncio.gather(*tasks, return_exceptions=True) + # 결과 정리 + for result in results: + if isinstance(result, Exception): + logger.error(f"업로드 태스크 오류: {result}") + upload_results.append({ + "product_index": len(upload_results) + 1, + "product_title": "Unknown", + "status": "error", + "folder_s3_url": None, + "uploaded_images": [], + "success_count": 0, + "fail_count": 0, + }) + db_save_results.append({ + "product_index": len(db_save_results) + 1, + "db_status": "error", + "error": str(result), + }) + else: + upload_result, db_result = result upload_results.append(upload_result) + db_save_results.append(db_result) + total_success_images += upload_result["success_count"] total_fail_images += upload_result["fail_count"] - # 2. 
DB에 상품 데이터 저장 - db_result = self._save_product_to_db( - task_run_id, keyword, product_index, product_info - ) - db_save_results.append(db_result) - - logger.success( - f"상품 {product_index} S3 업로드 + DB 저장 완료: " - f"이미지 성공 {upload_result['success_count']}개, DB {db_result['db_status']}" - ) - - except Exception as e: - logger.error( - f"상품 {product_index} S3 업로드/DB 저장 오류: {e}" - ) - upload_results.append( - { - "product_index": product_index, - "product_title": product_detail.get("title", "Unknown"), - "status": "error", - "folder_s3_url": None, - "uploaded_images": [], - "success_count": 0, - "fail_count": 0, - } - ) - db_save_results.append( - { - "product_index": product_index, - "db_status": "error", - "error": str(e), - } - ) - - # 상품간 간격 (서버 부하 방지) - if product_index < len(crawled_products): - await asyncio.sleep(1) + # 인덱스 순으로 정렬 + upload_results.sort(key=lambda x: x["product_index"]) + db_save_results.sort(key=lambda x: x["product_index"]) logger.success( f"S3 업로드 + DB 저장 서비스 완료: 총 성공 이미지 {total_success_images}개, " f"총 실패 이미지 {total_fail_images}개" ) - # 응답 데이터 구성 - data = { - "upload_results": upload_results, - "db_save_results": db_save_results, - "task_run_id": task_run_id, - "summary": { - "total_products": len(crawled_products), - "total_success_images": total_success_images, - "total_fail_images": total_fail_images, - "db_success_count": len( - [r for r in db_save_results if r.get("db_status") == "success"] - ), - "db_fail_count": len( - [r for r in db_save_results if r.get("db_status") == "error"] - ), + # Response.ok 사용하여 올바른 스키마로 응답 + return Response.ok( + data={ + "upload_results": upload_results, + "db_save_results": db_save_results, + "task_run_id": task_run_id, + "summary": { + "total_products": len(crawled_products), + "total_success_images": total_success_images, + "total_fail_images": total_fail_images, + "db_success_count": len( + [r for r in db_save_results if r.get("db_status") == "success"] + ), + "db_fail_count": len( + [r for r in 
db_save_results if r.get("db_status") == "error"] + ), + }, + "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), }, - "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), - } - - message = f"S3 업로드 + DB 저장 완료: {total_success_images}개 이미지 성공, {len([r for r in db_save_results if r.get('db_status') == 'success'])}개 상품 DB 저장 성공" - return Response.ok(data, message) + message=f"S3 업로드 + DB 저장 완료: 총 성공 이미지 {total_success_images}개, 총 실패 이미지 {total_fail_images}개" + ) except Exception as e: logger.error(f"S3 업로드 + DB 저장 서비스 전체 오류: {e}") - raise InvalidItemDataException() + # Response.error 사용하여 에러도 올바른 스키마로 응답 + return Response.error( + data={ + "upload_results": [], + "db_save_results": [], + "task_run_id": task_run_id, + "summary": { + "total_products": 0, + "total_success_images": 0, + "total_fail_images": 0, + "db_success_count": 0, + "db_fail_count": 0, + }, + "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), + }, + message=f"S3 업로드 서비스 오류: {str(e)}" + ) + + async def _upload_single_product_with_semaphore( + self, + semaphore: asyncio.Semaphore, + session: aiohttp.ClientSession, + product_info: Dict, + keyword: str, + base_folder: str, + task_run_id: int + ) -> tuple: + """세마포어를 사용한 단일 상품 업로드 + DB 저장""" + async with semaphore: + product_index = product_info.get("index", 0) + product_detail = product_info.get("product_detail") + + logger.info(f"상품 {product_index} S3 업로드 + DB 저장 시작") + + # 크롤링 실패한 상품은 스킵 + if not product_detail or product_info.get("status") != "success": + logger.warning(f"상품 {product_index}: 크롤링 실패로 인한 업로드 스킵") + upload_result = { + "product_index": product_index, + "product_title": "Unknown", + "status": "skipped", + "folder_s3_url": None, + "uploaded_images": [], + "success_count": 0, + "fail_count": 0, + } + db_result = { + "product_index": product_index, + "db_status": "skipped", + "error": "크롤링 실패", + } + return upload_result, db_result + + try: + # S3 업로드와 DB 저장을 동시에 실행 + upload_task = self.s3_util.upload_single_product_images( + session, 
product_info, product_index, keyword, base_folder + ) + db_task = asyncio.to_thread( + self._save_product_to_db, task_run_id, keyword, product_index, product_info + ) + + upload_result, db_result = await asyncio.gather(upload_task, db_task) + + logger.success( + f"상품 {product_index} S3 업로드 + DB 저장 완료: " + f"이미지 성공 {upload_result['success_count']}개, DB {db_result['db_status']}" + ) + + return upload_result, db_result + + except Exception as e: + logger.error(f"상품 {product_index} S3 업로드/DB 저장 오류: {e}") + upload_result = { + "product_index": product_index, + "product_title": product_detail.get("title", "Unknown"), + "status": "error", + "folder_s3_url": None, + "uploaded_images": [], + "success_count": 0, + "fail_count": 0, + } + db_result = { + "product_index": product_index, + "db_status": "error", + "error": str(e), + } + return upload_result, db_result def _save_product_to_db( - self, task_run_id: int, keyword: str, product_index: int, product_info: Dict + self, task_run_id: int, keyword: str, product_index: int, product_info: Dict ) -> Dict: """ 상품 데이터를 TASK_IO_DATA 테이블에 저장 (MariaDB) @@ -192,8 +248,8 @@ def _save_product_to_db( with self.db_manager.get_cursor() as cursor: sql = """ INSERT INTO task_io_data - (task_run_id, io_type, name, data_type, data_value, created_at) - VALUES (%s, %s, %s, %s, %s, %s) \ + (task_run_id, io_type, name, data_type, data_value, created_at) + VALUES (%s, %s, %s, %s, %s, %s) """ cursor.execute( @@ -223,4 +279,4 @@ def _save_product_to_db( "product_index": product_index, "db_status": "error", "error": str(e), - } + } \ No newline at end of file diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java index 17934012..a8a885ed 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java +++ 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/runner/fastapi/body/ProductSelectBodyBuilder.java @@ -28,11 +28,6 @@ public boolean supports(String taskName) { public ObjectNode build(Task task, Map workflowContext) { ObjectNode body = objectMapper.createObjectNode(); - // task_run_id는 현재 실행 중인 task의 run_id를 사용 - // 실제 구현에서는 Task 객체나 워크플로우 컨텍스트에서 가져와야 할 수 있습니다. - body.put("task_run_id", task.getId()); // Task 객체에서 ID를 가져오는 방식으로 가정 - - // 기본 선택 기준 설정 (이미지 개수 우선) body.put("selection_criteria", "image_count_priority"); return body; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java index 3fa71524..e2b663ab 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java @@ -128,6 +128,7 @@ private boolean executeTasksForJob( workflowLogger.info( "Job (JobRunId={}) 내 총 {}개의 Task를 순차 실행합니다.", jobRun.getId(), taskDtos.size()); boolean hasAnyTaskFailed = false; + Long s3UploadTaskRunId = null; // S3 업로드 태스크의 task_run_id 저장용 for (TaskDto taskDto : taskDtos) { try { @@ -146,6 +147,19 @@ private boolean executeTasksForJob( .map(builder -> builder.build(task, workflowContext)) .orElse(objectMapper.createObjectNode()); + if ("S3 업로드 태스크".equals(task.getName())) { + requestBody.put("task_run_id", taskRun.getId()); + s3UploadTaskRunId = taskRun.getId(); // S3 업로드의 task_run_id 저장 + } else if ("상품 선택 태스크".equals(task.getName())) { + // S3 업로드에서 사용한 task_run_id를 사용 + if (s3UploadTaskRunId != null) { + requestBody.put("task_run_id", s3UploadTaskRunId); + } else { + workflowLogger.error("S3 업로드 태스크가 먼저 실행되지 않아 task_run_id를 찾을 수 없습니다."); + // 또는 이전 Job에서 S3 업로드를 찾는 로직 추가 가능 + } + } + TaskRunner.TaskExecutionResult result = 
taskExecutionService.executeWithRetry(task, taskRun, requestBody); taskRun.finish(result.status(), result.message()); diff --git a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql index 9238b8a2..379140b5 100644 --- a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql +++ b/apps/user-service/src/main/resources/sql/03-insert-workflow.sql @@ -16,7 +16,7 @@ DELETE FROM `workflow`; -- 워크플로우 생성 (ID: 1) INSERT INTO `workflow` (`id`, `name`, `description`, `created_by`, `default_config`) VALUES (1, '상품 분석 및 블로그 자동 발행', '키워드 검색부터 상품 분석 후 블로그 발행까지의 자동화 프로세스', 1, - JSON_OBJECT('1',json_object('tag','naver'),'8',json_object('tag','naver_blog','blog_id', 'wtecho331', 'blog_pw', 'testpass'))) + JSON_OBJECT('1',json_object('tag','naver'),'9',json_object('tag','blogger','blog_id', '', 'blog_pw', ''))) ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), @@ -27,7 +27,7 @@ INSERT INTO `job` (`id`, `name`, `description`, `created_by`) VALUES (2, '블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', 1) ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), updated_at = NOW(); --- Task 생성 (ID: 1 ~ 7) - FastAPI Request Body 스키마 반영 +-- Task 생성 (ID: 1 ~ 9) INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES (1, '키워드 검색 태스크', 'FastAPI', JSON_OBJECT( 'endpoint', '/keywords/search', 'method', 'POST', @@ -56,7 +56,6 @@ INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES 'endpoint', '/products/crawl', 'method', 'POST', 'body', JSON_OBJECT('product_urls', 'List') -- { "product_urls": List[str] } 수정됨 )), - -- 🆕 S3 업로드 태스크 추가 (6, 'S3 업로드 태스크', 'FastAPI', JSON_OBJECT( 'endpoint', '/products/s3-upload', 'method', 'POST', 'body', JSON_OBJECT( -- { keyword: str, crawled_products: List, base_folder: str } @@ -65,9 +64,16 @@ INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES 'base_folder', 'String' ) )), + (7, '상품 선택 태스크', 
'FastAPI', JSON_OBJECT( + 'endpoint', '/products/select', 'method', 'POST', + 'body', JSON_OBJECT( -- { task_run_id: int, selection_criteria: str } + 'task_run_id', 'Integer', + 'selection_criteria', 'String' + ) + )), -- RAG관련 request body는 추후에 결정될 예정 - (7, '블로그 RAG 생성 태스크', 'FastAPI', JSON_OBJECT('endpoint', '/blogs/rag/create', 'method', 'POST')), - (8, '블로그 발행 태스크', 'FastAPI', JSON_OBJECT( + (8, '블로그 RAG 생성 태스크', 'FastAPI', JSON_OBJECT('endpoint', '/blogs/rag/create', 'method', 'POST')), + (9, '블로그 발행 태스크', 'FastAPI', JSON_OBJECT( 'endpoint', '/blogs/publish', 'method', 'POST', 'body', JSON_OBJECT( -- { tag: str, blog_id: str, ... } 'tag', 'String', @@ -92,9 +98,10 @@ INSERT INTO `workflow_job` (`workflow_id`, `job_id`, `execution_order`) VALUES -- Job-Task 연결 INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES - -- Job 1: 상품 분석 (키워드검색 → 상품검색 → 매칭 → 유사도 → 크롤링 → S3업로드) - (1, 1, 1), (1, 2, 2), (1, 3, 3), (1, 4, 4), (1, 5, 5), (1, 6, 6), - (2, 7, 1), (2, 8, 2) + -- Job 1: 상품 분석 (키워드검색 → 상품검색 → 매칭 → 유사도 → 크롤링 → S3업로드 → 상품선택) + (1, 1, 1), (1, 2, 2), (1, 3, 3), (1, 4, 4), (1, 5, 5), (1, 6, 6), (1, 7, 7), + -- Job 2: 블로그 콘텐츠 생성 (RAG생성 → 발행) + (2, 8, 1), (2, 9, 2) ON DUPLICATE KEY UPDATE execution_order = VALUES(execution_order); -- 스케줄 설정 (매일 오전 8시) From f9845f3ddcc47c2be81525c7390df20bea0fdb3a Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 25 Sep 2025 17:39:39 +0900 Subject: [PATCH 07/18] chore: poetry run black . 
--- .../app/service/crawl_service.py | 42 ++++++--- .../app/service/crawlers/detail_crawler.py | 12 +-- .../app/service/product_selection_service.py | 12 ++- .../app/service/s3_upload_service.py | 85 ++++++++++++------- 4 files changed, 97 insertions(+), 54 deletions(-) diff --git a/apps/pre-processing-service/app/service/crawl_service.py b/apps/pre-processing-service/app/service/crawl_service.py index 768dd3dc..3d1183eb 100644 --- a/apps/pre-processing-service/app/service/crawl_service.py +++ b/apps/pre-processing-service/app/service/crawl_service.py @@ -6,6 +6,7 @@ from loguru import logger from app.utils.response import Response import os + os.environ["TOKENIZERS_PARALLELISM"] = "false" @@ -13,7 +14,9 @@ class CrawlService: def __init__(self): pass - async def crawl_product_detail(self, request: RequestSadaguCrawl, max_concurrent: int = 5) -> dict: + async def crawl_product_detail( + self, request: RequestSadaguCrawl, max_concurrent: int = 5 + ) -> dict: """ 선택된 상품들의 상세 정보를 크롤링하는 비즈니스 로직입니다. (5단계) 여러 상품 URL을 입력받아 비동기로 상세 정보를 크롤링하여 딕셔너리로 반환합니다. 
@@ -33,7 +36,9 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl, max_concurrent # 모든 크롤링 태스크를 동시에 실행 tasks = [] for i, product_url in enumerate(product_urls, 1): - task = self._crawl_single_with_semaphore(semaphore, i, product_url, len(product_urls)) + task = self._crawl_single_with_semaphore( + semaphore, i, product_url, len(product_urls) + ) tasks.append(task) # 모든 태스크 동시 실행 및 결과 수집 @@ -43,14 +48,16 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl, max_concurrent for result in results: if isinstance(result, Exception): logger.error(f"크롤링 태스크 오류: {result}") - crawled_products.append({ - "index": len(crawled_products) + 1, - "url": "unknown", - "product_detail": None, - "status": "failed", - "error": str(result), - "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), - }) + crawled_products.append( + { + "index": len(crawled_products) + 1, + "url": "unknown", + "product_detail": None, + "status": "failed", + "error": str(result), + "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S"), + } + ) fail_count += 1 else: crawled_products.append(result) @@ -83,8 +90,13 @@ async def crawl_product_detail(self, request: RequestSadaguCrawl, max_concurrent logger.error(f"배치 크롤링 서비스 오류: error='{e}'") raise InvalidItemDataException() - async def _crawl_single_with_semaphore(self, semaphore: asyncio.Semaphore, index: int, product_url: str, - total_count: int) -> dict: + async def _crawl_single_with_semaphore( + self, + semaphore: asyncio.Semaphore, + index: int, + product_url: str, + total_count: int, + ) -> dict: """ 세마포어를 사용한 단일 상품 크롤링 """ @@ -122,7 +134,9 @@ async def _crawl_single_with_semaphore(self, semaphore: asyncio.Semaphore, index } except Exception as e: - logger.error(f"상품 {index} 크롤링 오류: url={product_url}, error='{e}'") + logger.error( + f"상품 {index} 크롤링 오류: url={product_url}, error='{e}'" + ) return { "index": index, "url": product_url, @@ -166,4 +180,4 @@ async def crawl_single_product_detail(self, product_url: str) -> dict: 
logger.error(f"단일 크롤링 오류: url={product_url}, error='{e}'") raise InvalidItemDataException() finally: - await crawler.close() \ No newline at end of file + await crawler.close() diff --git a/apps/pre-processing-service/app/service/crawlers/detail_crawler.py b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py index 097f7e0d..38c6d56c 100644 --- a/apps/pre-processing-service/app/service/crawlers/detail_crawler.py +++ b/apps/pre-processing-service/app/service/crawlers/detail_crawler.py @@ -22,7 +22,9 @@ async def crawl_detail(self, product_url: str) -> dict: ) # 기본 정보 추출 (CPU 집약적 작업을 별도 스레드에서 실행) - extraction_tasks = await asyncio.to_thread(self._extract_all_data, soup, product_url) + extraction_tasks = await asyncio.to_thread( + self._extract_all_data, soup, product_url + ) title, price, rating, options, material_info, all_images = extraction_tasks @@ -56,7 +58,7 @@ def _selenium_sync(url): self.driver.get(url) self.wait.until( lambda driver: driver.execute_script("return document.readyState") - == "complete" + == "complete" ) time.sleep(2) logger.debug("Selenium HTML 로딩 완료") @@ -92,9 +94,7 @@ def _extract_all_data(self, soup: BeautifulSoup, product_url: str) -> tuple: # 이미지 정보 추출 logger.info("이미지 정보 추출 중...") page_images = self._extract_images(soup) - option_images = [ - opt["image_url"] for opt in options if opt.get("image_url") - ] + option_images = [opt["image_url"] for opt in options if opt.get("image_url")] # 중복 제거 후 합치기 all_images = list(set(page_images + option_images)) @@ -199,4 +199,4 @@ def _extract_images(self, soup: BeautifulSoup) -> list[str]: src = self.base_url + src images.append(src) logger.info(f"총 {len(images)}개 이미지 URL 추출 완료") - return images \ No newline at end of file + return images diff --git a/apps/pre-processing-service/app/service/product_selection_service.py b/apps/pre-processing-service/app/service/product_selection_service.py index 41b1c8dc..590bf15e 100644 --- 
a/apps/pre-processing-service/app/service/product_selection_service.py +++ b/apps/pre-processing-service/app/service/product_selection_service.py @@ -53,7 +53,7 @@ def select_product_for_content(self, request: RequestProductSelect) -> dict: return Response.ok( data=data.dict(), - message=f"콘텐츠용 상품 선택 완료: {selected_product['name']}" + message=f"콘텐츠용 상품 선택 완료: {selected_product['name']}", ) except Exception as e: @@ -122,7 +122,11 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: product_detail = data_value.get("product_detail", {}) product_images = product_detail.get("product_images", []) - if data_value.get("status") == "success" and product_detail and len(product_images) > 0: + if ( + data_value.get("status") == "success" + and product_detail + and len(product_images) > 0 + ): successful_products.append( { "product": product, @@ -148,7 +152,9 @@ def _select_best_product(self, db_products: List[Dict]) -> Dict: # 2순위: 크롤링 성공한 첫 번째 상품 for product in db_products: data_value = product.get("data_value", {}) - if data_value.get("status") == "success" and data_value.get("product_detail"): + if data_value.get("status") == "success" and data_value.get( + "product_detail" + ): product_detail = data_value.get("product_detail", {}) logger.info(f"2순위 선택: name={product['name']}") return { diff --git a/apps/pre-processing-service/app/service/s3_upload_service.py b/apps/pre-processing-service/app/service/s3_upload_service.py index fd422f42..725db0ec 100644 --- a/apps/pre-processing-service/app/service/s3_upload_service.py +++ b/apps/pre-processing-service/app/service/s3_upload_service.py @@ -20,7 +20,9 @@ def __init__(self): self.s3_util = S3UploadUtil() self.db_manager = MariadbManager() - async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_concurrent: int = 5) -> dict: + async def upload_crawled_products_to_s3( + self, request: RequestS3Upload, max_concurrent: int = 5 + ) -> dict: """ 크롤링된 상품들의 이미지와 데이터를 S3에 업로드하고 DB에 저장하는 비즈니스 로직 (6단계) 
""" @@ -47,7 +49,7 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_conc }, "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), }, - message="task_run_id is required from Java workflow" + message="task_run_id is required from Java workflow", ) logger.info( @@ -73,7 +75,12 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_conc tasks = [] for product_info in crawled_products: task = self._upload_single_product_with_semaphore( - semaphore, session, product_info, keyword, base_folder, task_run_id + semaphore, + session, + product_info, + keyword, + base_folder, + task_run_id, ) tasks.append(task) @@ -84,20 +91,24 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_conc for result in results: if isinstance(result, Exception): logger.error(f"업로드 태스크 오류: {result}") - upload_results.append({ - "product_index": len(upload_results) + 1, - "product_title": "Unknown", - "status": "error", - "folder_s3_url": None, - "uploaded_images": [], - "success_count": 0, - "fail_count": 0, - }) - db_save_results.append({ - "product_index": len(db_save_results) + 1, - "db_status": "error", - "error": str(result), - }) + upload_results.append( + { + "product_index": len(upload_results) + 1, + "product_title": "Unknown", + "status": "error", + "folder_s3_url": None, + "uploaded_images": [], + "success_count": 0, + "fail_count": 0, + } + ) + db_save_results.append( + { + "product_index": len(db_save_results) + 1, + "db_status": "error", + "error": str(result), + } + ) else: upload_result, db_result = result upload_results.append(upload_result) @@ -126,15 +137,23 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_conc "total_success_images": total_success_images, "total_fail_images": total_fail_images, "db_success_count": len( - [r for r in db_save_results if r.get("db_status") == "success"] + [ + r + for r in db_save_results + if r.get("db_status") == "success" + ] ), 
"db_fail_count": len( - [r for r in db_save_results if r.get("db_status") == "error"] + [ + r + for r in db_save_results + if r.get("db_status") == "error" + ] ), }, "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), }, - message=f"S3 업로드 + DB 저장 완료: 총 성공 이미지 {total_success_images}개, 총 실패 이미지 {total_fail_images}개" + message=f"S3 업로드 + DB 저장 완료: 총 성공 이미지 {total_success_images}개, 총 실패 이미지 {total_fail_images}개", ) except Exception as e: @@ -154,17 +173,17 @@ async def upload_crawled_products_to_s3(self, request: RequestS3Upload, max_conc }, "uploaded_at": time.strftime("%Y-%m-%d %H:%M:%S"), }, - message=f"S3 업로드 서비스 오류: {str(e)}" + message=f"S3 업로드 서비스 오류: {str(e)}", ) async def _upload_single_product_with_semaphore( - self, - semaphore: asyncio.Semaphore, - session: aiohttp.ClientSession, - product_info: Dict, - keyword: str, - base_folder: str, - task_run_id: int + self, + semaphore: asyncio.Semaphore, + session: aiohttp.ClientSession, + product_info: Dict, + keyword: str, + base_folder: str, + task_run_id: int, ) -> tuple: """세마포어를 사용한 단일 상품 업로드 + DB 저장""" async with semaphore: @@ -198,7 +217,11 @@ async def _upload_single_product_with_semaphore( session, product_info, product_index, keyword, base_folder ) db_task = asyncio.to_thread( - self._save_product_to_db, task_run_id, keyword, product_index, product_info + self._save_product_to_db, + task_run_id, + keyword, + product_index, + product_info, ) upload_result, db_result = await asyncio.gather(upload_task, db_task) @@ -229,7 +252,7 @@ async def _upload_single_product_with_semaphore( return upload_result, db_result def _save_product_to_db( - self, task_run_id: int, keyword: str, product_index: int, product_info: Dict + self, task_run_id: int, keyword: str, product_index: int, product_info: Dict ) -> Dict: """ 상품 데이터를 TASK_IO_DATA 테이블에 저장 (MariaDB) @@ -279,4 +302,4 @@ def _save_product_to_db( "product_index": product_index, "db_status": "error", "error": str(e), - } \ No newline at end of file + } From 
d561ab356dbd16049587befb340a0ab9825232c3 Mon Sep 17 00:00:00 2001 From: thkim7 Date: Thu, 25 Sep 2025 17:39:56 +0900 Subject: [PATCH 08/18] chore: spotlessApply --- .../service/WorkflowExecutionService.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java index e2b663ab..afc4f555 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowExecutionService.java @@ -128,7 +128,7 @@ private boolean executeTasksForJob( workflowLogger.info( "Job (JobRunId={}) 내 총 {}개의 Task를 순차 실행합니다.", jobRun.getId(), taskDtos.size()); boolean hasAnyTaskFailed = false; - Long s3UploadTaskRunId = null; // S3 업로드 태스크의 task_run_id 저장용 + Long s3UploadTaskRunId = null; // S3 업로드 태스크의 task_run_id 저장용 for (TaskDto taskDto : taskDtos) { try { @@ -147,18 +147,18 @@ private boolean executeTasksForJob( .map(builder -> builder.build(task, workflowContext)) .orElse(objectMapper.createObjectNode()); - if ("S3 업로드 태스크".equals(task.getName())) { - requestBody.put("task_run_id", taskRun.getId()); - s3UploadTaskRunId = taskRun.getId(); // S3 업로드의 task_run_id 저장 - } else if ("상품 선택 태스크".equals(task.getName())) { - // S3 업로드에서 사용한 task_run_id를 사용 - if (s3UploadTaskRunId != null) { - requestBody.put("task_run_id", s3UploadTaskRunId); - } else { - workflowLogger.error("S3 업로드 태스크가 먼저 실행되지 않아 task_run_id를 찾을 수 없습니다."); - // 또는 이전 Job에서 S3 업로드를 찾는 로직 추가 가능 - } + if ("S3 업로드 태스크".equals(task.getName())) { + requestBody.put("task_run_id", taskRun.getId()); + s3UploadTaskRunId = taskRun.getId(); // S3 업로드의 task_run_id 저장 + } else if ("상품 선택 태스크".equals(task.getName())) { + // S3 업로드에서 사용한 task_run_id를 사용 + if (s3UploadTaskRunId != null) { + 
requestBody.put("task_run_id", s3UploadTaskRunId); + } else { + workflowLogger.error("S3 업로드 태스크가 먼저 실행되지 않아 task_run_id를 찾을 수 없습니다."); + // 또는 이전 Job에서 S3 업로드를 찾는 로직 추가 가능 } + } TaskRunner.TaskExecutionResult result = taskExecutionService.executeWithRetry(task, taskRun, requestBody); From f3210d3d7d235415f7999c034611f2f58105512d Mon Sep 17 00:00:00 2001 From: Jihu Kim Date: Thu, 25 Sep 2025 18:54:37 +0900 Subject: [PATCH 09/18] =?UTF-8?q?Workflow=20=EC=88=98=EB=8F=99=20=EC=8B=A4?= =?UTF-8?q?=ED=96=89=20=EB=B0=8F=20Retry=20=EB=A1=9C=EC=A7=81=20=ED=85=8C?= =?UTF-8?q?=EC=8A=A4=ED=8A=B8=20=EC=BD=94=EB=93=9C=20=EC=9E=91=EC=84=B1=20?= =?UTF-8?q?(#209)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: integration log4j2 설정파일 분리 * refactor: RetryConfig 구성 * feat: Workflow 수동 실행 api 테스트 코드 작성 * feat: task 실패 시 재시도 동작 테스트 코드 작성 * refactor: ApplicationListener로 변경 * refactor: Code Formatting * refactor: javadoc 수정 --- .../service/TaskExecutionService.java | 102 ++++++------------ .../config/QuartzSchedulerInitializer.java | 18 ++-- .../global/config/retry/RetryConfig.java | 28 +++++ .../application-test-integration.yml | 2 +- .../resources/log4j2-test-integration.yml | 77 +++++++++++++ .../TaskExecutionServiceIntegrationTest.java | 60 +++++++++++ .../WorkflowRunApiIntegrationTest.java | 64 +++++++++++ 7 files changed, 272 insertions(+), 79 deletions(-) create mode 100644 apps/user-service/src/main/java/site/icebang/global/config/retry/RetryConfig.java create mode 100644 apps/user-service/src/main/resources/log4j2-test-integration.yml create mode 100644 apps/user-service/src/test/java/site/icebang/integration/tests/workflow/TaskExecutionServiceIntegrationTest.java create mode 100644 apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/TaskExecutionService.java 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/TaskExecutionService.java index 29f28d98..62b72fed 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/TaskExecutionService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/TaskExecutionService.java @@ -4,11 +4,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.retry.annotation.Backoff; -import org.springframework.retry.annotation.Recover; -import org.springframework.retry.annotation.Retryable; +import org.springframework.retry.support.RetryTemplate; import org.springframework.stereotype.Service; -import org.springframework.web.client.RestClientException; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -18,80 +15,45 @@ import site.icebang.domain.workflow.model.TaskRun; import site.icebang.domain.workflow.runner.TaskRunner; -/** - * 워크플로우 내 개별 Task의 실행과 재시도 정책을 전담하는 서비스입니다. - * - *

이 클래스는 {@code WorkflowExecutionService}로부터 Task 실행 책임을 위임받습니다. Spring AOP의 '자기 - * 호출(Self-invocation)' 문제를 회피하고, 재시도 로직을 비즈니스 흐름과 분리하기 위해 별도의 서비스로 구현되었습니다. - * - *

주요 기능:

- * - *
    - *
  • {@code @Retryable} 어노테이션을 통한 선언적 재시도 처리 - *
  • {@code @Recover} 어노테이션을 이용한 최종 실패 시 복구 로직 수행 - *
  • Task 타입에 맞는 적절한 {@code TaskRunner} 선택 및 실행 - *
- * - * @author jihu0210@naver.com - * @since v0.1.0 - */ @Service @RequiredArgsConstructor public class TaskExecutionService { - /** 워크플로우 실행 이력 전용 로거 */ private static final Logger workflowLogger = LoggerFactory.getLogger("WORKFLOW_HISTORY"); - private final Map taskRunners; + private final RetryTemplate taskExecutionRetryTemplate; // 📌 RetryTemplate 주입 - /** - * 지정된 Task를 재시도 정책을 적용하여 실행합니다. - * - *

HTTP 통신 오류 등 {@code RestClientException} 발생 시, 5초의 고정된 간격({@code Backoff})으로 최대 3회({@code - * maxAttempts})까지 실행을 재시도합니다. 지원하지 않는 Task 타입의 경우 재시도 없이 즉시 {@code IllegalArgumentException}을 - * 발생시킵니다. - * - * @param task 실행할 Task의 도메인 모델 - * @param taskRun 현재 실행에 대한 기록 객체 - * @param requestBody 동적으로 생성된 요청 Body - * @return Task 실행 결과 - * @throws IllegalArgumentException 지원하지 않는 Task 타입일 경우 - * @since v0.1.0 - */ - @Retryable( - value = {RestClientException.class}, - maxAttempts = 3, - backoff = @Backoff(delay = 5000)) + // 📌 @Retryable, @Recover 어노테이션 제거 public TaskRunner.TaskExecutionResult executeWithRetry( Task task, TaskRun taskRun, ObjectNode requestBody) { - workflowLogger.info("Task 실행 시도: TaskId={}, TaskRunId={}", task.getId(), taskRun.getId()); - - String runnerBeanName = task.getType().toLowerCase() + "TaskRunner"; - TaskRunner runner = taskRunners.get(runnerBeanName); - - if (runner == null) { - throw new IllegalArgumentException("지원하지 않는 Task 타입: " + task.getType()); - } - - return runner.execute(task, taskRun, requestBody); - } - /** - * {@code @Retryable} 재시도가 모두 실패했을 때 호출되는 복구 메소드입니다. - * - *

이 메소드는 {@code executeWithRetry} 메소드와 동일한 파라미터 시그니처를 가지며, 발생한 예외를 첫 번째 파라미터로 추가로 받습니다. 최종 실패 - * 상태를 기록하고 실패 결과를 반환하는 역할을 합니다. - * - * @param e 재시도를 유발한 마지막 예외 객체 - * @param task 실패한 Task의 도메인 모델 - * @param taskRun 실패한 실행의 기록 객체 - * @param requestBody 실패 당시 사용된 요청 Body - * @return 최종 실패를 나타내는 Task 실행 결과 - * @since v0.1.0 - */ - @Recover - public TaskRunner.TaskExecutionResult recover( - RestClientException e, Task task, TaskRun taskRun, ObjectNode requestBody) { - workflowLogger.error("최종 Task 실행 실패 (모든 재시도 소진): TaskRunId={}", taskRun.getId(), e); - return TaskRunner.TaskExecutionResult.failure("최대 재시도 횟수 초과: " + e.getMessage()); + // RetryTemplate을 사용하여 실행 로직을 감쌉니다. + return taskExecutionRetryTemplate.execute( + // 1. 재시도할 로직 (RetryCallback) + context -> { + // 📌 이 블록은 재시도할 때마다 실행되므로, 로그가 누락되지 않습니다. + workflowLogger.info( + "Task 실행 시도 #{}: TaskId={}, TaskRunId={}", + context.getRetryCount() + 1, + task.getId(), + taskRun.getId()); + + String runnerBeanName = task.getType().toLowerCase() + "TaskRunner"; + TaskRunner runner = taskRunners.get(runnerBeanName); + + if (runner == null) { + throw new IllegalArgumentException("지원하지 않는 Task 타입: " + task.getType()); + } + + // 이 부분에서 RestClientException 발생 시 재시도됩니다. + return runner.execute(task, taskRun, requestBody); + }, + // 2. 
모든 재시도가 실패했을 때 실행될 로직 (RecoveryCallback) + context -> { + Throwable lastThrowable = context.getLastThrowable(); + workflowLogger.error( + "최종 Task 실행 실패 (모든 재시도 소진): TaskRunId={}", taskRun.getId(), lastThrowable); + return TaskRunner.TaskExecutionResult.failure( + "최대 재시도 횟수 초과: " + lastThrowable.getMessage()); + }); } } diff --git a/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java b/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java index bdca3015..9ebd150f 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java +++ b/apps/user-service/src/main/java/site/icebang/global/config/QuartzSchedulerInitializer.java @@ -3,6 +3,8 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.boot.CommandLineRunner; +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.stereotype.Component; import site.icebang.domain.schedule.model.Schedule; import site.icebang.domain.schedule.mapper.ScheduleMapper; @@ -12,13 +14,13 @@ /** * 애플리케이션 시작 시 데이터베이스에 저장된 스케줄을 Quartz 스케줄러에 동적으로 등록하는 초기화 클래스입니다. * - *

이 클래스는 {@code CommandLineRunner}를 구현하여, Spring Boot 애플리케이션이 완전히 - * 로드된 후 단 한 번 실행됩니다. 데이터베이스의 {@code schedule} 테이블을 'Source of Truth'로 삼아, - * 활성화된 모든 스케줄을 읽어와 Quartz 엔진에 동기화하는 매우 중요한 역할을 수행합니다. + *

이 클래스는 {@code ApplicationListener}를 구현하여, Spring의 ApplicationContext가 + * 완전히 초기화되고 모든 Bean이 준비되었을 때 단 한 번 실행됩니다. 데이터베이스의 {@code schedule} 테이블을 + * 'Source of Truth'로 삼아, 활성화된 모든 스케줄을 읽어와 Quartz 엔진에 동기화하는 매우 중요한 역할을 수행합니다. * *

주요 기능:

*
    - *
  • 애플리케이션 시작 시점에 DB의 활성 스케줄 조회
  • + *
  • 애플리케이션 컨텍스트 초기화 완료 시점에 DB의 활성 스케줄 조회
  • *
  • 조회된 스케줄을 {@code QuartzScheduleService}를 통해 Quartz 엔진에 등록
  • *
* @@ -28,22 +30,22 @@ @Slf4j @Component @RequiredArgsConstructor -public class QuartzSchedulerInitializer implements CommandLineRunner { +public class QuartzSchedulerInitializer implements ApplicationListener { private final ScheduleMapper scheduleMapper; private final QuartzScheduleService quartzScheduleService; /** - * Spring Boot 애플리케이션 시작 시 호출되는 메인 실행 메소드입니다. + * Spring ApplicationContext가 완전히 새로고침(초기화)될 때 호출되는 이벤트 핸들러 메소드입니다. * *

데이터베이스에서 활성화된 모든 스케줄을 조회하고, 각 스케줄을 * {@code QuartzScheduleService}를 통해 Quartz 스케줄러에 등록합니다. * - * @param args 애플리케이션 실행 시 전달되는 인자 + * @param event 발생한 ContextRefreshedEvent 객체 * @since v0.1.0 */ @Override - public void run(String... args) { + public void onApplicationEvent(ContextRefreshedEvent event) { log.info("Quartz 스케줄러 초기화 시작: DB 스케줄을 등록합니다."); try { List activeSchedules = scheduleMapper.findAllActive(); diff --git a/apps/user-service/src/main/java/site/icebang/global/config/retry/RetryConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/retry/RetryConfig.java new file mode 100644 index 00000000..98cda2bc --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/retry/RetryConfig.java @@ -0,0 +1,28 @@ +package site.icebang.global.config.retry; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.retry.backoff.FixedBackOffPolicy; +import org.springframework.retry.policy.SimpleRetryPolicy; +import org.springframework.retry.support.RetryTemplate; + +@Configuration +public class RetryConfig { + + @Bean + public RetryTemplate taskExecutionRetryTemplate() { + RetryTemplate retryTemplate = new RetryTemplate(); + + // 1. 재시도 정책 설정: 최대 3번 시도 + SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); + retryPolicy.setMaxAttempts(3); + retryTemplate.setRetryPolicy(retryPolicy); + + // 2. 
재시도 간격 설정: 5초 고정 간격 + FixedBackOffPolicy backOffPolicy = new FixedBackOffPolicy(); + backOffPolicy.setBackOffPeriod(5000L); // 5000ms = 5초 + retryTemplate.setBackOffPolicy(backOffPolicy); + + return retryTemplate; + } +} diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml index 526cf151..6eccdace 100644 --- a/apps/user-service/src/main/resources/application-test-integration.yml +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -39,4 +39,4 @@ mybatis: map-underscore-to-camel-case: true logging: - config: classpath:log4j2-test-unit.yml \ No newline at end of file + config: classpath:log4j2-test-integration.yml \ No newline at end of file diff --git a/apps/user-service/src/main/resources/log4j2-test-integration.yml b/apps/user-service/src/main/resources/log4j2-test-integration.yml new file mode 100644 index 00000000..e28b7e24 --- /dev/null +++ b/apps/user-service/src/main/resources/log4j2-test-integration.yml @@ -0,0 +1,77 @@ +Configuration: + name: test + + properties: + property: + - name: "log-path" + value: "./logs" + - name: "charset-UTF-8" + value: "UTF-8" + # 통일된 콘솔 패턴 - 모든 로그에 RequestId 포함 + - name: "console-layout-pattern" + value: "%highlight{[%-5level]} [%X{id}] %d{MM-dd HH:mm:ss} [%t] %n %msg%n%n" + + # [Appenders] 로그 기록방식 정의 + Appenders: + # 통일된 콘솔 출력 + Console: + name: console-appender + target: SYSTEM_OUT + PatternLayout: + pattern: ${console-layout-pattern} + + # [Loggers] 로그 출력 범위를 정의 + Loggers: + # [Loggers - Root] 모든 로그를 기록하는 최상위 로그를 정의 + Root: + level: OFF + AppenderRef: + - ref: console-appender + + # [Loggers - Loggers] 특정 패키지나 클래스에 대한 로그를 정의 + Logger: + # 1. Spring Framework 로그 + - name: org.springframework + additivity: "false" + level: INFO + AppenderRef: + - ref: console-appender + + # 2. 애플리케이션 로그 + - name: site.icebang + additivity: "false" + level: INFO + AppenderRef: + - ref: console-appender + + # 3. 
HikariCP 로그 비활성화 + - name: com.zaxxer.hikari + level: OFF + + # 4. Spring Security 로그 - 인증/인가 추적에 중요 + - name: org.springframework.security + level: INFO + additivity: "false" + AppenderRef: + - ref: console-appender + + # 5. 웹 요청 로그 - 요청 처리 과정 추적 + - name: org.springframework.web + level: INFO + additivity: "false" + AppenderRef: + - ref: console-appender + + # 6. 트랜잭션 로그 - DB 작업 추적 + - name: org.springframework.transaction + level: INFO + additivity: "false" + AppenderRef: + - ref: console-appender + + # 7. WORKFLOW_HISTORY 로그 - 워크플로우 기록 + - name: "WORKFLOW_HISTORY" + level: "INFO" + AppenderRef: + - ref: "console-appender" + additivity: "false" \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/TaskExecutionServiceIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/TaskExecutionServiceIntegrationTest.java new file mode 100644 index 00000000..8308fe0d --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/TaskExecutionServiceIntegrationTest.java @@ -0,0 +1,60 @@ +package site.icebang.integration.tests.workflow; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.bean.override.mockito.MockitoBean; +import org.springframework.web.client.RestClientException; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import site.icebang.domain.workflow.model.Task; +import site.icebang.domain.workflow.model.TaskRun; +import site.icebang.domain.workflow.runner.TaskRunner; +import site.icebang.domain.workflow.runner.fastapi.FastApiTaskRunner; +import 
site.icebang.domain.workflow.service.TaskExecutionService; +import site.icebang.integration.setup.support.IntegrationTestSupport; + +/** + * TaskExecutionService의 재시도 로직에 대한 통합 테스트 클래스입니다. 실제 Spring 컨텍스트를 로드하여 RetryTemplate 기반의 재시도 기능이 정상 + * 동작하는지 검증합니다. + */ +public class TaskExecutionServiceIntegrationTest extends IntegrationTestSupport { + + @Autowired private TaskExecutionService taskExecutionService; + + @MockitoBean(name = "fastapiTaskRunner") + private FastApiTaskRunner mockFastApiTaskRunner; + + @Test + @DisplayName("Task 실행이 3번 모두 실패하면, 재시도 로그가 3번 기록되고 최종 FAILED 결과를 반환해야 한다") + void executeWithRetry_shouldLogRetries_andFail_afterAllRetries() { + // given + Task testTask = new Task(1L, "테스트 태스크", "FastAPI", null, null, null, null); + TaskRun testTaskRun = new TaskRun(); + ObjectNode testRequestBody = new ObjectMapper().createObjectNode(); + + // Mock Runner가 호출될 때마다 예외를 던지도록 설정 + when(mockFastApiTaskRunner.execute(any(Task.class), any(TaskRun.class), any(ObjectNode.class))) + .thenThrow(new RestClientException("Connection failed")); + + // when + // RetryTemplate이 적용된 실제 서비스를 호출 + TaskRunner.TaskExecutionResult finalResult = + taskExecutionService.executeWithRetry(testTask, testTaskRun, testRequestBody); + + // then + // 1. Runner의 execute 메소드가 RetryTemplate 정책에 따라 3번 호출되었는지 검증 + verify(mockFastApiTaskRunner, times(3)) + .execute(any(Task.class), any(TaskRun.class), any(ObjectNode.class)); + + // 2. 
RecoveryCallback이 반환한 최종 결과가 FAILED인지 검증 + assertThat(finalResult.isFailure()).isTrue(); + assertThat(finalResult.message()).contains("최대 재시도 횟수 초과"); + } +} diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java new file mode 100644 index 00000000..2daa4db1 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java @@ -0,0 +1,64 @@ +package site.icebang.integration.tests.workflow; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.resource; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.MediaType; +import org.springframework.security.test.context.support.WithUserDetails; +import org.springframework.test.context.bean.override.mockito.MockitoBean; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; + +import site.icebang.domain.workflow.service.WorkflowExecutionService; +import site.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = {"classpath:sql/01-insert-internal-users.sql", "classpath:sql/03-insert-workflow.sql"}, + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +public class WorkflowRunApiIntegrationTest extends IntegrationTestSupport { + 
+ @MockitoBean private WorkflowExecutionService mockWorkflowExecutionService; + + @Test + @DisplayName("워크플로우 수동 실행 API 호출 성공") + @WithUserDetails("admin@icebang.site") + void runWorkflow_success() throws Exception { + // given + Long workflowId = 1L; + + // when & then + mockMvc + .perform( + post(getApiUrlForDocs("/v0/workflows/{workflowId}/run"), workflowId) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isAccepted()) // 📌 1. 즉시 202 Accepted 응답을 받는지 확인 + .andDo( + document( + "workflow-run", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Workflow Execution") + .summary("워크플로우 수동 실행") + .description( + "지정된 ID의 워크플로우를 즉시 비동기적으로 실행합니다. " + + "성공 시 202 Accepted를 반환하며, 실제 실행은 백그라운드에서 진행됩니다.") + .build()))); + + // 📌 2. 비동기 호출된 executeWorkflow 메소드가 1초 이내에 1번 실행되었는지 검증 + verify(mockWorkflowExecutionService, timeout(1000).times(1)).executeWorkflow(workflowId); + } +} From 773b61f2af2e4870b7fbf3cde52f84c6a7235c72 Mon Sep 17 00:00:00 2001 From: bwnfo3 <142577603+bwnfo3@users.noreply.github.com> Date: Thu, 25 Sep 2025 22:24:23 +0900 Subject: [PATCH 10/18] =?UTF-8?q?Workflow=20=EC=83=9D=EC=84=B1=20api=20(#2?= =?UTF-8?q?11)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: WorkflowCreateDto 초안 * feat: Workflow생성 관련 메서드, sql 추가 * feat: Workflow생성 임시 post api * feat: WorkflowCreateE2eTest 초안 * feat: WorkflowController 워크플로우 생성 api * feat: description @Null 제거 * feat: 워크플로우 생성시 job,task 생성 메서드 추가 * feat: 워크플로우 생성시 job,task 생성 메서드 추가 * feat: 워크플로우 생성시 job,task 생성 메서드 구현중 * feat: 워크플로우 생성 e2eTest 작성중 * chore: spotlessApply * feat: job,task 생성 없이 워크플로우 생성으로만 수정 --- .../controller/WorkflowController.java | 22 ++ .../workflow/dto/WorkflowCreateDto.java | 112 +++++++++ .../workflow/mapper/WorkflowMapper.java | 11 + 
.../workflow/service/WorkflowService.java | 72 ++++++ .../mybatis/mapper/WorkflowMapper.xml | 57 +++++ .../scenario/WorkflowCreateFlowE2eTest.java | 219 ++++++++++++++++++ 6 files changed, 493 insertions(+) create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCreateDto.java create mode 100644 apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java index fd42ea13..c98ece1f 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowController.java @@ -2,15 +2,20 @@ import java.math.BigInteger; +import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; +import org.springframework.security.core.annotation.AuthenticationPrincipal; import org.springframework.web.bind.annotation.*; +import jakarta.validation.Valid; import lombok.RequiredArgsConstructor; import site.icebang.common.dto.ApiResponse; import site.icebang.common.dto.PageParams; import site.icebang.common.dto.PageResult; +import site.icebang.domain.auth.model.AuthCredential; import site.icebang.domain.workflow.dto.WorkflowCardDto; +import site.icebang.domain.workflow.dto.WorkflowCreateDto; import site.icebang.domain.workflow.dto.WorkflowDetailCardDto; import site.icebang.domain.workflow.service.WorkflowExecutionService; import site.icebang.domain.workflow.service.WorkflowService; @@ -29,6 +34,23 @@ public ApiResponse> getWorkflowList( return ApiResponse.success(result); } + @PostMapping("") + @ResponseStatus(HttpStatus.CREATED) + public ApiResponse createWorkflow( + @Valid @RequestBody WorkflowCreateDto workflowCreateDto, + @AuthenticationPrincipal AuthCredential 
authCredential) { + // 인증 체크 + if (authCredential == null) { + throw new IllegalArgumentException("로그인이 필요합니다"); + } + + // AuthCredential에서 userId 추출 + BigInteger userId = authCredential.getId(); + + workflowService.createWorkflow(workflowCreateDto, userId); + return ApiResponse.success(null); + } + @PostMapping("/{workflowId}/run") public ResponseEntity runWorkflow(@PathVariable Long workflowId) { // HTTP 요청/응답 스레드를 블로킹하지 않도록 비동기 실행 diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCreateDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCreateDto.java new file mode 100644 index 00000000..bcd0cc56 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCreateDto.java @@ -0,0 +1,112 @@ +package site.icebang.domain.workflow.dto; + +import java.math.BigInteger; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import jakarta.validation.constraints.*; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 워크플로우 생성 요청 DTO + * + *

프론트엔드에서 워크플로우 생성 시 필요한 모든 정보를 담는 DTO - 기본 정보: 이름, 설명 - 플랫폼 설정: 검색 플랫폼, 포스팅 플랫폼 - 계정 설정: 포스팅 계정 + * 정보 (JSON 형태로 저장) + */ +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class WorkflowCreateDto { + @Null private BigInteger id; + + @NotBlank(message = "워크플로우 이름은 필수입니다") + @Size(max = 100, message = "워크플로우 이름은 100자를 초과할 수 없습니다") + @Pattern( + regexp = "^[가-힣a-zA-Z0-9\\s_-]+$", + message = "워크플로우 이름은 한글, 영문, 숫자, 공백, 언더스코어, 하이픈만 사용 가능합니다") + private String name; + + @Size(max = 500, message = "설명은 500자를 초과할 수 없습니다") + private String description; + + @Pattern(regexp = "^(naver|naver_store)?$", message = "검색 플랫폼은 'naver' 또는 'naver_store'만 가능합니다") + @JsonProperty("search_platform") + private String searchPlatform; + + @Pattern( + regexp = "^(naver_blog|tstory_blog|blogger)?$", + message = "포스팅 플랫폼은 'naver_blog', 'tstory_blog', 'blogger' 중 하나여야 합니다") + @JsonProperty("posting_platform") + private String postingPlatform; + + @Size(max = 100, message = "포스팅 계정 ID는 100자를 초과할 수 없습니다") + @JsonProperty("posting_account_id") + private String postingAccountId; + + @Size(max = 200, message = "포스팅 계정 비밀번호는 200자를 초과할 수 없습니다") + @JsonProperty("posting_account_password") + private String postingAccountPassword; + + @Size(max = 100, message = "블로그 이름은 100자를 초과할 수 없습니다") + @JsonProperty("blog_name") + private String blogName; + + @Builder.Default + @JsonProperty("is_enabled") + private Boolean isEnabled = true; + + // JSON 변환용 필드 (MyBatis에서 사용) + private String defaultConfigJson; + + public String genertateDefaultConfigJson() { + StringBuilder jsonBuilder = new StringBuilder(); + jsonBuilder.append("{"); + + // 크롤링 플랫폼 설정 (키: "1") + if (searchPlatform != null && !searchPlatform.isBlank()) { + jsonBuilder.append("\"1\": {\"tag\": \"").append(searchPlatform).append("\"}"); + } + + // 포스팅 설정 (키: "8") + if (hasPostingConfig()) { + if (jsonBuilder.length() > 1) { + jsonBuilder.append(", "); + } + jsonBuilder + .append("\"8\": {") + .append("\"tag\": \"") + 
.append(postingPlatform) + .append("\", ") + .append("\"blog_id\": \"") + .append(postingAccountId) + .append("\", ") + .append("\"blog_pw\": \"") + .append(postingAccountPassword) + .append("\""); + + // tstory_blog인 경우 blog_name 추가 + if ("tstory_blog".equals(postingPlatform) && blogName != null && !blogName.isBlank()) { + jsonBuilder.append(", \"blog_name\": \"").append(blogName).append("\""); + } + + jsonBuilder.append("}"); + } + + jsonBuilder.append("}"); + return jsonBuilder.toString(); + } + + // 포스팅 설정 완성도 체크 (상태 확인 유틸) + public boolean hasPostingConfig() { + return postingPlatform != null + && !postingPlatform.isBlank() + && postingAccountId != null + && !postingAccountId.isBlank() + && postingAccountPassword != null + && !postingAccountPassword.isBlank(); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java index 82381737..417dfd1d 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/mapper/WorkflowMapper.java @@ -13,6 +13,17 @@ public interface WorkflowMapper { int selectWorkflowCount(PageParams pageParams); + int insertWorkflow(Map params); // insert workflow + + // Job 생성 관련 메서드 + void insertJobs(Map params); // 여러 Job을 동적으로 생성 + + void insertWorkflowJobs(Map params); // Workflow-Job 연결 + + void insertJobTasks(Map params); // Job-Task 연결 + + boolean existsByName(String name); + WorkflowCardDto selectWorkflowById(BigInteger id); WorkflowDetailCardDto selectWorkflowDetailById(BigInteger workflowId); diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java index e8c857f3..06a9ee5c 100644 --- 
a/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/service/WorkflowService.java @@ -1,6 +1,7 @@ package site.icebang.domain.workflow.service; import java.math.BigInteger; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -8,12 +9,14 @@ import org.springframework.transaction.annotation.Transactional; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; import site.icebang.common.dto.PageParams; import site.icebang.common.dto.PageResult; import site.icebang.common.service.PageableService; import site.icebang.domain.workflow.dto.ScheduleDto; import site.icebang.domain.workflow.dto.WorkflowCardDto; +import site.icebang.domain.workflow.dto.WorkflowCreateDto; import site.icebang.domain.workflow.dto.WorkflowDetailCardDto; import site.icebang.domain.workflow.mapper.WorkflowMapper; @@ -32,6 +35,7 @@ * @author jihu0210@naver.com * @since v0.1.0 */ +@Slf4j @Service @RequiredArgsConstructor public class WorkflowService implements PageableService { @@ -86,4 +90,72 @@ public WorkflowDetailCardDto getWorkflowDetail(BigInteger workflowId) { return workflow; } + + /** 워크플로우 생성 */ + @Transactional + public void createWorkflow(WorkflowCreateDto dto, BigInteger createdBy) { + // 1. 기본 검증 + validateBasicInput(dto, createdBy); + + // 2. 비즈니스 검증 + validateBusinessRules(dto); + + // 3. 중복체크 + if (workflowMapper.existsByName(dto.getName())) { + throw new IllegalArgumentException("이미 존재하는 워크플로우 이름입니다 : " + dto.getName()); + } + + // 4. 
워크플로우 생성 + try { + // JSON 설정 생성 + String defaultConfigJson = dto.genertateDefaultConfigJson(); + dto.setDefaultConfigJson(defaultConfigJson); + + // DB 삽입 파라미터 구성 + Map params = new HashMap<>(); + params.put("dto", dto); + params.put("createdBy", createdBy); + + int result = workflowMapper.insertWorkflow(params); + if (result != 1) { + throw new RuntimeException("워크플로우 생성에 실패했습니다"); + } + + log.info("워크플로우 생성 완료: {} (생성자: {})", dto.getName(), createdBy); + + } catch (Exception e) { + log.error("워크플로우 생성 실패: {}", dto.getName(), e); + throw new RuntimeException("워크플로우 생성 중 오류가 발생했습니다", e); + } + } + + /** 기본 입력값 검증 */ + private void validateBasicInput(WorkflowCreateDto dto, BigInteger createdBy) { + if (dto == null) { + throw new IllegalArgumentException("워크플로우 정보가 필요합니다"); + } + if (createdBy == null) { + throw new IllegalArgumentException("생성자 정보가 필요합니다"); + } + } + + /** 비즈니스 규칙 검증 */ + private void validateBusinessRules(WorkflowCreateDto dto) { + // 포스팅 플랫폼 선택 시 계정 정보 필수 검증 + String postingPlatform = dto.getPostingPlatform(); + if (postingPlatform != null && !postingPlatform.isBlank()) { + if (dto.getPostingAccountId() == null || dto.getPostingAccountId().isBlank()) { + throw new IllegalArgumentException("포스팅 플랫폼 선택 시 계정 ID는 필수입니다"); + } + if (dto.getPostingAccountPassword() == null || dto.getPostingAccountPassword().isBlank()) { + throw new IllegalArgumentException("포스팅 플랫폼 선택 시 계정 비밀번호는 필수입니다"); + } + // 티스토리 블로그 추가 검증 + if ("tstory_blog".equals(postingPlatform)) { + if (dto.getBlogName() == null || dto.getBlogName().isBlank()) { + throw new IllegalArgumentException("티스토리 블로그 선택 시 블로그 이름은 필수입니다"); + } + } + } + } } diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml index 63a9f6db..dda398a9 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml @@ 
-129,4 +129,61 @@ wj.execution_order, j.id, j.name, j.description, j.is_enabled ORDER BY wj.execution_order + + + INSERT INTO workflow ( + name, + description, + is_enabled, + created_by, + created_at, + default_config + ) VALUES ( + #{dto.name}, + #{dto.description}, + #{dto.isEnabled}, + #{createdBy}, + NOW(), + #{dto.defaultConfigJson} + ) + + + + + + + + + SELECT LAST_INSERT_ID() as id + + INSERT INTO job (name, description, created_by, created_at) VALUES + ('상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', #{createdBy}, NOW()), + ('블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', #{createdBy}, NOW()) + + + + + INSERT INTO workflow_job (workflow_id, job_id, execution_order) VALUES + (#{workflowId}, #{job1Id}, 1), + (#{workflowId}, #{job2Id}, 2) + + + + + INSERT INTO job_task (job_id, task_id, execution_order) VALUES + + (#{job1Id}, 1, 1), + (#{job1Id}, 2, 2), + (#{job1Id}, 3, 3), + (#{job1Id}, 4, 4), + (#{job1Id}, 5, 5), + (#{job1Id}, 6, 6), + + (#{job2Id}, 7, 1), + (#{job2Id}, 8, 2) + \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java new file mode 100644 index 00000000..115bec64 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java @@ -0,0 +1,219 @@ +package site.icebang.e2e.scenario; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.*; +import org.springframework.test.context.jdbc.Sql; + +import site.icebang.e2e.setup.annotation.E2eTest; +import site.icebang.e2e.setup.support.E2eTestSupport; + +@Sql( + value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) +@DisplayName("워크플로우 생성 플로우 
E2E 테스트") +@E2eTest +class WorkflowCreateFlowE2eTest extends E2eTestSupport { + + @SuppressWarnings("unchecked") + @Test + @DisplayName("사용자가 새 워크플로우를 생성하는 전체 플로우") + void completeWorkflowCreateFlow() throws Exception { + logStep(1, "사용자 로그인"); + + // 1. 로그인 (세션에 userId 저장) + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders loginHeaders = new HttpHeaders(); + loginHeaders.setContentType(MediaType.APPLICATION_JSON); + loginHeaders.set("Origin", "https://admin.icebang.site"); + loginHeaders.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> loginEntity = new HttpEntity<>(loginRequest, loginHeaders); + + ResponseEntity loginResponse = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), loginEntity, Map.class); + + assertThat(loginResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) loginResponse.getBody().get("success")).isTrue(); + + logSuccess("사용자 로그인 성공 - 세션 쿠키 자동 저장됨"); + logDebug("현재 세션 쿠키: " + getSessionCookies()); + + logStep(2, "네이버 블로그 워크플로우 생성"); + + // 2. 
네이버 블로그 워크플로우 생성 + Map naverBlogWorkflow = new HashMap<>(); + naverBlogWorkflow.put("name", "상품 분석 및 네이버 블로그 자동 발행"); + naverBlogWorkflow.put("description", "키워드 검색부터 상품 분석 후 네이버 블로그 발행까지의 자동화 프로세스"); + naverBlogWorkflow.put("search_platform", "naver"); + naverBlogWorkflow.put("posting_platform", "naver_blog"); + naverBlogWorkflow.put("posting_account_id", "test_naver_blog"); + naverBlogWorkflow.put("posting_account_password", "naver_password123"); + naverBlogWorkflow.put("is_enabled", true); + + HttpHeaders workflowHeaders = new HttpHeaders(); + workflowHeaders.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> naverEntity = + new HttpEntity<>(naverBlogWorkflow, workflowHeaders); + + ResponseEntity naverResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), naverEntity, Map.class); + + assertThat(naverResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) naverResponse.getBody().get("success")).isTrue(); + + logSuccess("네이버 블로그 워크플로우 생성 성공"); + + logStep(3, "티스토리 블로그 워크플로우 생성 (블로그명 포함)"); + + // 3. 
티스토리 블로그 워크플로우 생성 (블로그명 필수) + Map tstoryWorkflow = new HashMap<>(); + tstoryWorkflow.put("name", "티스토리 자동 발행 워크플로우"); + tstoryWorkflow.put("description", "티스토리 블로그 자동 포스팅"); + tstoryWorkflow.put("search_platform", "naver"); + tstoryWorkflow.put("posting_platform", "tstory_blog"); + tstoryWorkflow.put("posting_account_id", "test_tstory"); + tstoryWorkflow.put("posting_account_password", "tstory_password123"); + tstoryWorkflow.put("blog_name", "my-tech-blog"); // 티스토리는 블로그명 필수 + tstoryWorkflow.put("is_enabled", true); + + HttpEntity> tstoryEntity = + new HttpEntity<>(tstoryWorkflow, workflowHeaders); + + ResponseEntity tstoryResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), tstoryEntity, Map.class); + + assertThat(tstoryResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) tstoryResponse.getBody().get("success")).isTrue(); + + logSuccess("티스토리 워크플로우 생성 성공"); + + logStep(4, "검색만 하는 워크플로우 생성 (포스팅 없음)"); + + // 4. 포스팅 없는 검색 전용 워크플로우 (추후 예정) + Map searchOnlyWorkflow = new HashMap<>(); + searchOnlyWorkflow.put("name", "검색 전용 워크플로우"); + searchOnlyWorkflow.put("description", "상품 검색 및 분석만 수행"); + searchOnlyWorkflow.put("search_platform", "naver"); + searchOnlyWorkflow.put("is_enabled", true); + // posting_platform, posting_account_id, posting_account_password는 선택사항 + + HttpEntity> searchOnlyEntity = + new HttpEntity<>(searchOnlyWorkflow, workflowHeaders); + + ResponseEntity searchOnlyResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), searchOnlyEntity, Map.class); + + assertThat(searchOnlyResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) searchOnlyResponse.getBody().get("success")).isTrue(); + + logSuccess("검색 전용 워크플로우 생성 성공"); + + logCompletion("워크플로우 생성 플로우 완료"); + } + + @Test + @DisplayName("중복된 이름으로 워크플로우 생성 시도 시 실패") + void createWorkflow_withDuplicateName_shouldFail() { + // 선행 조건: 로그인 + performUserLogin(); + + logStep(1, "첫 번째 워크플로우 생성"); + + // 첫 번째 워크플로우 생성 + Map firstWorkflow = 
new HashMap<>(); + firstWorkflow.put("name", "중복테스트워크플로우"); + firstWorkflow.put("search_platform", "naver"); + firstWorkflow.put("is_enabled", true); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> firstEntity = new HttpEntity<>(firstWorkflow, headers); + + ResponseEntity firstResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), firstEntity, Map.class); + + assertThat(firstResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + logSuccess("첫 번째 워크플로우 생성 성공"); + + logStep(2, "동일한 이름으로 두 번째 워크플로우 생성 시도"); + + // 동일한 이름으로 다시 생성 시도 + Map duplicateWorkflow = new HashMap<>(); + duplicateWorkflow.put("name", "중복테스트워크플로우"); // 동일한 이름 + duplicateWorkflow.put("search_platform", "naver_store"); + duplicateWorkflow.put("is_enabled", true); + + HttpEntity> duplicateEntity = new HttpEntity<>(duplicateWorkflow, headers); + + ResponseEntity duplicateResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), duplicateEntity, Map.class); + + // 중복 이름 처리 확인 (400 또는 409 예상) + assertThat(duplicateResponse.getStatusCode()) + .isIn(HttpStatus.BAD_REQUEST, HttpStatus.CONFLICT, HttpStatus.INTERNAL_SERVER_ERROR); + + logSuccess("중복 이름 워크플로우 생성 차단 확인"); + } + + @Test + @DisplayName("필수 필드 누락 시 워크플로우 생성 실패") + void createWorkflow_withMissingRequiredFields_shouldFail() { + // 선행 조건: 로그인 + performUserLogin(); + + logStep(1, "워크플로우 이름 없이 생성 시도"); + + // 이름 없는 요청 + Map noNameWorkflow = new HashMap<>(); + noNameWorkflow.put("search_platform", "naver"); + noNameWorkflow.put("is_enabled", true); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(noNameWorkflow, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), entity, Map.class); + + assertThat(response.getStatusCode()) + .isIn(HttpStatus.BAD_REQUEST, HttpStatus.UNPROCESSABLE_ENTITY); + + logSuccess("필수 필드 검증 확인"); + } + + 
/** 사용자 로그인을 수행하는 헬퍼 메서드 */ + private void performUserLogin() { + Map loginRequest = new HashMap<>(); + loginRequest.put("email", "admin@icebang.site"); + loginRequest.put("password", "qwer1234!A"); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.set("Origin", "https://admin.icebang.site"); + headers.set("Referer", "https://admin.icebang.site/"); + + HttpEntity> entity = new HttpEntity<>(loginRequest, headers); + + ResponseEntity response = + restTemplate.postForEntity(getV0ApiUrl("/auth/login"), entity, Map.class); + + if (response.getStatusCode() != HttpStatus.OK) { + logError("사용자 로그인 실패: " + response.getStatusCode()); + throw new RuntimeException("User login failed"); + } + + logSuccess("사용자 로그인 완료"); + } +} From f8b70269935e8e87eb62654ab2eb733c605d2f08 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Thu, 25 Sep 2025 22:55:58 +0900 Subject: [PATCH 11/18] =?UTF-8?q?Timezone=20Instant(UTC)=20=EB=A7=88?= =?UTF-8?q?=EC=9D=B4=EA=B7=B8=EB=A0=88=EC=9D=B4=EC=85=98=20(#210)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: UTC 기반으로 변경 * refactor: data, schema sql 분리 * fix: IntegrationTest fix * test: E2e, integration test UTC 검증 --- .../domain/schedule/model/Schedule.java | 8 +- .../icebang/domain/workflow/dto/JobDto.java | 6 +- .../domain/workflow/dto/ScheduleDto.java | 6 +- .../icebang/domain/workflow/dto/TaskDto.java | 6 +- .../domain/workflow/dto/WorkflowCardDto.java | 4 +- .../workflow/dto/WorkflowDetailCardDto.java | 4 +- .../workflow/dto/WorkflowHistoryDTO.java | 6 +- .../icebang/domain/workflow/model/Job.java | 6 +- .../icebang/domain/workflow/model/JobRun.java | 12 +- .../icebang/domain/workflow/model/Task.java | 6 +- .../domain/workflow/model/TaskRun.java | 14 +- .../domain/workflow/model/Workflow.java | 6 +- .../domain/workflow/model/WorkflowRun.java | 12 +- .../typehandler/InstantTypeHandler.java | 94 +++++++ 
.../main/resources/application-develop.yml | 18 +- .../main/resources/application-production.yml | 2 +- .../main/resources/application-test-e2e.yml | 12 +- .../application-test-integration.yml | 8 +- .../main/resources/application-test-unit.yml | 8 +- .../src/main/resources/application.yml | 4 + .../resources/mybatis/mapper/JobMapper.xml | 8 +- .../resources/mybatis/mapper/JobRunMapper.xml | 6 +- .../mybatis/mapper/ScheduleMapper.xml | 12 + .../resources/mybatis/mapper/TaskMapper.xml | 2 + .../mybatis/mapper/TaskRunMapper.xml | 13 + .../mybatis/mapper/WorkflowMapper.xml | 14 +- .../mybatis/mapper/WorkflowRunMapper.xml | 6 +- .../resources/sql/{ => data}/00-truncate.sql | 0 .../sql/data/01-insert-internal-users-h2.sql | 229 ++++++++++++++++ .../{ => data}/01-insert-internal-users.sql | 0 .../{ => data}/02-insert-external-users.sql | 0 .../sql/data/03-insert-workflow-h2.sql | 110 ++++++++ .../sql/{ => data}/03-insert-workflow.sql | 8 +- .../data/04-insert-workflow-history-h2.sql | 76 ++++++ .../{ => data}/04-insert-workflow-history.sql | 4 +- .../sql/data/05-fix-timezone-data-h2.sql | 33 +++ .../sql/data/05-fix-timezone-data.sql | 250 ++++++++++++++++++ .../resources/sql/{ => schema}/00-drop-h2.sql | 0 .../sql/{ => schema}/00-drop-maria.sql | 0 .../resources/sql/{ => schema}/01-schema.sql | 0 .../sql/{ => schema}/02-quartz-schema.sql | 0 .../sql/schema/03-schema-h2-timezone.sql | 51 ++++ .../sql/schema/03-schema-mariadb-timezone.sql | 49 ++++ .../e2e/scenario/UserLogoutFlowE2eTest.java | 5 +- .../scenario/UserRegistrationFlowE2eTest.java | 5 +- .../scenario/WorkflowCreateFlowE2eTest.java | 82 +++++- .../setup/config/E2eTestConfiguration.java | 2 +- .../tests/auth/AuthApiIntegrationTest.java | 2 +- .../OrganizationApiIntegrationTest.java | 4 +- .../WorkflowHistoryApiIntegrationTest.java | 75 +++++- .../WorkflowRunApiIntegrationTest.java | 5 +- 51 files changed, 1194 insertions(+), 99 deletions(-) create mode 100644 
apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java rename apps/user-service/src/main/resources/sql/{ => data}/00-truncate.sql (100%) create mode 100644 apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql rename apps/user-service/src/main/resources/sql/{ => data}/01-insert-internal-users.sql (100%) rename apps/user-service/src/main/resources/sql/{ => data}/02-insert-external-users.sql (100%) create mode 100644 apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql rename apps/user-service/src/main/resources/sql/{ => data}/03-insert-workflow.sql (97%) create mode 100644 apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql rename apps/user-service/src/main/resources/sql/{ => data}/04-insert-workflow-history.sql (96%) create mode 100644 apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql create mode 100644 apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql rename apps/user-service/src/main/resources/sql/{ => schema}/00-drop-h2.sql (100%) rename apps/user-service/src/main/resources/sql/{ => schema}/00-drop-maria.sql (100%) rename apps/user-service/src/main/resources/sql/{ => schema}/01-schema.sql (100%) rename apps/user-service/src/main/resources/sql/{ => schema}/02-quartz-schema.sql (100%) create mode 100644 apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql create mode 100644 apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java index c2218bd0..cce15a25 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java @@ -1,6 +1,6 @@ package 
site.icebang.domain.schedule.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -22,10 +22,10 @@ public class Schedule { private String parameters; // JSON format private boolean isActive; private String lastRunStatus; - private LocalDateTime lastRunAt; - private LocalDateTime createdAt; + private Instant lastRunAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; private String scheduleText; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java index 6dd40c5d..035d6d17 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -10,9 +10,9 @@ public class JobDto { private String name; private String description; private Boolean isEnabled; - private LocalDateTime createdAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; private Integer executionOrder; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java index 397285cb..752bd619 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -10,7 +10,7 @@ public class ScheduleDto { private String cronExpression; 
private Boolean isActive; private String lastRunStatus; - private LocalDateTime lastRunAt; + private Instant lastRunAt; private String scheduleText; - private LocalDateTime createdAt; + private Instant createdAt; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java index fa83fe7d..1047d141 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import com.fasterxml.jackson.databind.JsonNode; @@ -14,6 +14,6 @@ public class TaskDto { private Integer executionOrder; private JsonNode settings; private JsonNode parameters; - private LocalDateTime createdAt; - private LocalDateTime updatedAt; + private Instant createdAt; + private Instant updatedAt; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java index a39ce0c3..4d074930 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java @@ -1,7 +1,7 @@ package site.icebang.domain.workflow.dto; import java.math.BigInteger; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -12,5 +12,5 @@ public class WorkflowCardDto { private String description; private boolean isEnabled; private String createdBy; - private LocalDateTime createdAt; + private Instant createdAt; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java index 
a2ef46b8..175db6ac 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import java.util.List; import java.util.Map; @@ -9,7 +9,7 @@ @Data public class WorkflowDetailCardDto extends WorkflowCardDto { private String defaultConfig; - private LocalDateTime updatedAt; + private Instant updatedAt; private String updatedBy; private List schedules; private List> jobs; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java index 18a25b7e..9f5a9b8d 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java @@ -1,7 +1,7 @@ package site.icebang.domain.workflow.dto; import java.math.BigInteger; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -11,8 +11,8 @@ public class WorkflowHistoryDTO { private BigInteger id; private BigInteger workflowId; private String traceId; - private LocalDateTime startedAt; - private LocalDateTime finishedAt; + private Instant startedAt; + private Instant finishedAt; private BigInteger createdBy; private String triggerType; private String runNumber; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java index f0d36d8b..c363f8de 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import 
java.time.LocalDateTime; +import java.time.Instant; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -17,9 +17,9 @@ public class Job { private String name; private String description; private boolean isEnabled; - private LocalDateTime createdAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; public Job(JobDto dto) { diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java index 038890dc..eeaffd28 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Getter; import lombok.NoArgsConstructor; @@ -13,15 +13,15 @@ public class JobRun { private Long workflowRunId; private Long jobId; private String status; // PENDING, RUNNING, SUCCESS, FAILED - private LocalDateTime startedAt; - private LocalDateTime finishedAt; - private LocalDateTime createdAt; + private Instant startedAt; + private Instant finishedAt; + private Instant createdAt; private JobRun(Long workflowRunId, Long jobId) { this.workflowRunId = workflowRunId; this.jobId = jobId; this.status = "RUNNING"; - this.startedAt = LocalDateTime.now(); + this.startedAt = Instant.now(); this.createdAt = this.startedAt; } @@ -33,6 +33,6 @@ public static JobRun start(Long workflowRunId, Long jobId) { /** Job 실행 완료 처리 */ public void finish(String status) { this.status = status; - this.finishedAt = LocalDateTime.now(); + this.finishedAt = Instant.now(); } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java index 2c917100..04d577c1 100644 
--- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import com.fasterxml.jackson.databind.JsonNode; @@ -26,9 +26,9 @@ public class Task { private JsonNode settings; - private LocalDateTime createdAt; + private Instant createdAt; - private LocalDateTime updatedAt; + private Instant updatedAt; public Task(TaskDto taskDto) { this.id = taskDto.getId(); diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java index d49542f0..6d89a150 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Getter; import lombok.NoArgsConstructor; @@ -15,16 +15,16 @@ public class TaskRun { private Integer executionOrder; private String status; // PENDING, RUNNING, SUCCESS, FAILED private String resultMessage; // 실행 결과 메시지 - private LocalDateTime startedAt; - private LocalDateTime finishedAt; - private LocalDateTime createdAt; + private Instant startedAt; + private Instant finishedAt; + private Instant createdAt; // 생성자나 정적 팩토리 메서드를 통해 객체 생성 로직을 관리 private TaskRun(Long jobRunId, Long taskId) { this.jobRunId = jobRunId; this.taskId = taskId; this.status = "PENDING"; - this.createdAt = LocalDateTime.now(); + this.createdAt = Instant.now(); } /** Task 실행 시작을 위한 정적 팩토리 메서드 */ @@ -32,7 +32,7 @@ public static TaskRun start(Long jobRunId, Long taskId, Integer executionOrder) TaskRun taskRun = new TaskRun(jobRunId, taskId); taskRun.executionOrder = executionOrder; taskRun.status = "RUNNING"; - 
taskRun.startedAt = LocalDateTime.now(); + taskRun.startedAt = Instant.now(); return taskRun; } @@ -40,6 +40,6 @@ public static TaskRun start(Long jobRunId, Long taskId, Integer executionOrder) public void finish(String status, String resultMessage) { this.status = status; this.resultMessage = resultMessage; - this.finishedAt = LocalDateTime.now(); + this.finishedAt = Instant.now(); } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java index 8b536003..695364aa 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -16,9 +16,9 @@ public class Workflow { private String name; private String description; private boolean isEnabled; - private LocalDateTime createdAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; /** 워크플로우별 기본 설정값 (JSON) */ diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java index 011f7ee5..5741e77b 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import java.util.UUID; import lombok.Getter; @@ -14,15 +14,15 @@ public class WorkflowRun { private Long workflowId; private String traceId; // 분산 추적을 위한 ID private String status; // PENDING, RUNNING, 
SUCCESS, FAILED - private LocalDateTime startedAt; - private LocalDateTime finishedAt; - private LocalDateTime createdAt; + private Instant startedAt; + private Instant finishedAt; + private Instant createdAt; private WorkflowRun(Long workflowId) { this.workflowId = workflowId; this.traceId = UUID.randomUUID().toString(); // 고유 추적 ID 생성 this.status = "RUNNING"; - this.startedAt = LocalDateTime.now(); + this.startedAt = Instant.now(); this.createdAt = this.startedAt; } @@ -34,6 +34,6 @@ public static WorkflowRun start(Long workflowId) { /** 워크플로우 실행 완료 처리 */ public void finish(String status) { this.status = status; - this.finishedAt = LocalDateTime.now(); + this.finishedAt = Instant.now(); } } diff --git a/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java new file mode 100644 index 00000000..4146c4af --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java @@ -0,0 +1,94 @@ +package site.icebang.global.config.mybatis.typehandler; + +import java.sql.CallableStatement; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.Instant; + +import org.apache.ibatis.type.BaseTypeHandler; +import org.apache.ibatis.type.JdbcType; +import org.apache.ibatis.type.MappedTypes; + +/** + * MyBatis에서 Java 8의 {@code Instant} 타입을 데이터베이스의 TIMESTAMP 타입과 매핑하기 위한 커스텀 타입 핸들러입니다. + * + *

이 핸들러를 통해 애플리케이션에서는 UTC 기준의 시간을 {@code Instant} 객체로 다루고, 데이터베이스에는 해당 객체를 TIMESTAMP 형태로 저장하거나 + * 읽어올 수 있습니다. + * + *

MyBatis XML 매퍼에서의 사용 예제:

+ * + *
{@code
+ * 
+ *     
+ * 
+ * }
+ * + * @author jihu0210@naver.com + * @since v0.1.0 + */ +@MappedTypes(Instant.class) +public class InstantTypeHandler extends BaseTypeHandler { + + /** + * {@code Instant} 파라미터를 DB에 저장하기 위해 Timestamp로 변환하여 PreparedStatement에 설정합니다. + * + * @param ps PreparedStatement 객체 + * @param i 파라미터 인덱스 + * @param parameter 변환할 Instant 객체 + * @param jdbcType JDBC 타입 + * @throws SQLException 변환 실패 시 + */ + @Override + public void setNonNullParameter(PreparedStatement ps, int i, Instant parameter, JdbcType jdbcType) + throws SQLException { + ps.setTimestamp(i, Timestamp.from(parameter)); + } + + /** + * ResultSet에서 컬럼 이름으로 Timestamp를 가져와 {@code Instant} 객체로 변환합니다. + * + * @param rs ResultSet 객체 + * @param columnName 컬럼 이름 + * @return 변환된 Instant 객체, 원본이 null이면 null + * @throws SQLException 변환 실패 시 + */ + @Override + public Instant getNullableResult(ResultSet rs, String columnName) throws SQLException { + Timestamp timestamp = rs.getTimestamp(columnName); + return timestamp != null ? timestamp.toInstant() : null; + } + + /** + * ResultSet에서 컬럼 인덱스로 Timestamp를 가져와 {@code Instant} 객체로 변환합니다. + * + * @param rs ResultSet 객체 + * @param columnIndex 컬럼 인덱스 + * @return 변환된 Instant 객체, 원본이 null이면 null + * @throws SQLException 변환 실패 시 + */ + @Override + public Instant getNullableResult(ResultSet rs, int columnIndex) throws SQLException { + Timestamp timestamp = rs.getTimestamp(columnIndex); + return timestamp != null ? timestamp.toInstant() : null; + } + + /** + * CallableStatement에서 컬럼 인덱스로 Timestamp를 가져와 {@code Instant} 객체로 변환합니다. + * + * @param cs CallableStatement 객체 + * @param columnIndex 컬럼 인덱스 + * @return 변환된 Instant 객체, 원본이 null이면 null + * @throws SQLException 변환 실패 시 + */ + @Override + public Instant getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { + Timestamp timestamp = cs.getTimestamp(columnIndex); + return timestamp != null ? 
timestamp.toInstant() : null; + } +} diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index 9de00956..64e1a0be 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -28,7 +28,7 @@ spring: auto-startup: true # 📌 Quartz 전용 DataSource 설정을 여기에 추가 datasource: - url: jdbc:mariadb://localhost:3306/pre_process + url: jdbc:mariadb://localhost:3306/pre_process?serverTimezone=UTC username: mariadb password: qwer1234 driver-class-name: org.mariadb.jdbc.Driver @@ -44,14 +44,16 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-maria.sql - - classpath:sql/01-schema.sql - - classpath:sql/02-quartz-schema.sql + - classpath:sql/schema/00-drop-maria.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-mariadb-timezone.sql data-locations: - - classpath:sql/00-truncate.sql - - classpath:sql/01-insert-internal-users.sql - - classpath:sql/02-insert-external-users.sql - - classpath:sql/03-insert-workflow.sql + - classpath:sql/data/00-truncate.sql + - classpath:sql/data/01-insert-internal-users.sql + - classpath:sql/data/02-insert-external-users.sql + - classpath:sql/data/03-insert-workflow.sql + - classpath:sql/data/05-fix-timezone-data.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-production.yml b/apps/user-service/src/main/resources/application-production.yml index 406fed87..c53e00bb 100644 --- a/apps/user-service/src/main/resources/application-production.yml +++ b/apps/user-service/src/main/resources/application-production.yml @@ -4,7 +4,7 @@ spring: on-profile: production datasource: - url: jdbc:mariadb://${DB_HOST}:${DB_PORT}/${DB_NAME} + url: jdbc:mariadb://${DB_HOST}:${DB_PORT}/${DB_NAME}?serverTimezone=UTC username: ${DB_USER} password: ${DB_PASS} driver-class-name: 
org.mariadb.jdbc.Driver diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml b/apps/user-service/src/main/resources/application-test-e2e.yml index 3a777909..14c572b1 100644 --- a/apps/user-service/src/main/resources/application-test-e2e.yml +++ b/apps/user-service/src/main/resources/application-test-e2e.yml @@ -7,8 +7,16 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-maria.sql - - classpath:sql/01-schema.sql + - classpath:sql/schema/00-drop-maria.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-mariadb-timezone.sql + data-locations: + - classpath:sql/data/00-truncate.sql + - classpath:sql/data/01-insert-internal-users.sql + - classpath:sql/data/02-insert-external-users.sql + - classpath:sql/data/03-insert-workflow.sql + - classpath:sql/data/05-fix-timezone-data.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml index 6eccdace..0bc7cbcc 100644 --- a/apps/user-service/src/main/resources/application-test-integration.yml +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -10,7 +10,7 @@ spring: password: driver-class-name: org.h2.Driver hikari: - connection-init-sql: "SET MODE MariaDB; SET NON_KEYWORDS USER;" + connection-init-sql: "SET MODE MariaDB; SET NON_KEYWORDS USER; " connection-timeout: 30000 idle-timeout: 600000 max-lifetime: 1800000 @@ -28,8 +28,10 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-h2.sql - - classpath:sql/01-schema.sql + - classpath:sql/schema/00-drop-h2.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-h2-timezone.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-test-unit.yml 
b/apps/user-service/src/main/resources/application-test-unit.yml index d9a8059b..1487e336 100644 --- a/apps/user-service/src/main/resources/application-test-unit.yml +++ b/apps/user-service/src/main/resources/application-test-unit.yml @@ -11,7 +11,7 @@ spring: password: driver-class-name: org.h2.Driver hikari: - connection-init-sql: "SET MODE MariaDB" + connection-init-sql: "SET MODE MariaDB " connection-timeout: 30000 idle-timeout: 600000 max-lifetime: 1800000 @@ -29,8 +29,10 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-h2.sql - - classpath:sql/01-schema.sql + - classpath:sql/schema/00-drop-h2.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-h2-timezone.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index fbda82f3..55fece16 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -7,6 +7,10 @@ spring: context: cache: maxSize: 1 + jackson: + time-zone: UTC + serialization: + write-dates-as-timestamps: false mybatis: # Mapper XML 파일 위치 diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml index cd64ad2c..5b959db3 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml @@ -8,8 +8,8 @@ - - + + @@ -18,8 +18,8 @@ - - + + diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml index 3a0e17bd..2cc51d78 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml @@ -8,9 +8,9 @@ - - - + + + diff --git 
a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml index 2a5480e3..80d6ffae 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml @@ -3,6 +3,18 @@ + + + + + + + + + + + + diff --git a/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml index 8fb277e2..61ec3cf0 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml @@ -2,6 +2,19 @@ + + + + + + + + + + + + + INSERT INTO task_run (job_run_id, task_id, execution_order, status, started_at, created_at) VALUES (#{jobRunId}, #{taskId}, #{executionOrder}, #{status}, #{startedAt}, #{createdAt}) diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml index dda398a9..ea5a0d01 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml @@ -47,9 +47,9 @@ - + - + @@ -58,9 +58,9 @@ - + - + @@ -143,7 +143,7 @@ #{dto.description}, #{dto.isEnabled}, #{createdBy}, - NOW(), + UTC_TIMESTAMP(), #{dto.defaultConfigJson} ) @@ -161,8 +161,8 @@ SELECT LAST_INSERT_ID() as id INSERT INTO job (name, description, created_by, created_at) VALUES - ('상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', #{createdBy}, NOW()), - ('블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', #{createdBy}, NOW()) + ('상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', #{createdBy}, UTC_TIMESTAMP()), + ('블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', #{createdBy}, UTC_TIMESTAMP()) diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml 
b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml index d032da56..8011fc6c 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml @@ -8,9 +8,9 @@ - - - + + + diff --git a/apps/user-service/src/main/resources/sql/00-truncate.sql b/apps/user-service/src/main/resources/sql/data/00-truncate.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/00-truncate.sql rename to apps/user-service/src/main/resources/sql/data/00-truncate.sql diff --git a/apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql b/apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql new file mode 100644 index 00000000..88108427 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql @@ -0,0 +1,229 @@ +-- icebang 내부 직원 전체 INSERT (H2 호환 버전) + +-- 1. icebang 조직 +INSERT INTO `organization` (`name`, `domain_name`) VALUES + ('icebang', 'icebang.site'); + +-- 2. icebang 부서들 (직접 ID로 참조) +INSERT INTO `department` (`organization_id`, `name`) VALUES + (1, 'AI개발팀'), + (1, '데이터팀'), + (1, '콘텐츠팀'), + (1, '마케팅팀'), + (1, '운영팀'), + (1, '기획팀'); + +-- 3. icebang 직책들 (직접 ID로 참조) +INSERT INTO `position` (`organization_id`, `title`) VALUES + (1, 'CEO'), + (1, 'CTO'), + (1, '팀장'), + (1, '시니어'), + (1, '주니어'), + (1, '인턴'); + +-- 4. 
바이럴 콘텐츠 워크플로우 권한들 +INSERT INTO `permission` (`resource`, `description`) VALUES +-- 사용자 관리 +('users.create', '사용자 생성'), +('users.read', '사용자 조회'), +('users.read.own', '본인 정보 조회'), +('users.read.department', '부서 내 사용자 조회'), +('users.read.organization', '조직 전체 사용자 조회'), +('users.update', '사용자 정보 수정'), +('users.update.own', '본인 정보 수정'), +('users.delete', '사용자 삭제'), +('users.invite', '사용자 초대'), + +-- 조직 관리 +('organizations.read', '조직 조회'), +('organizations.settings', '조직 설정 관리'), + +-- 부서 관리 +('departments.read', '부서 조회'), +('departments.manage', '부서 관리'), + +-- 역할/권한 관리 +('roles.create', '역할 생성'), +('roles.read', '역할 조회'), +('roles.update', '역할 수정'), +('roles.assign', '역할 할당'), +('permissions.read', '권한 조회'), +('permissions.assign', '권한 할당'), + +-- 트렌드 키워드 관리 +('trends.read', '트렌드 키워드 조회'), +('trends.create', '트렌드 키워드 등록'), +('trends.update', '트렌드 키워드 수정'), +('trends.delete', '트렌드 키워드 삭제'), +('trends.analyze', '트렌드 분석'), + +-- 크롤링 관리 +('crawling.create', '크롤링 작업 생성'), +('crawling.read', '크롤링 결과 조회'), +('crawling.update', '크롤링 설정 수정'), +('crawling.delete', '크롤링 데이터 삭제'), +('crawling.execute', '크롤링 실행'), +('crawling.schedule', '크롤링 스케줄 관리'), + +-- 콘텐츠 생성 +('content.create', '콘텐츠 생성'), +('content.read', '콘텐츠 조회'), +('content.read.own', '본인 콘텐츠만 조회'), +('content.read.department', '부서 콘텐츠 조회'), +('content.read.all', '모든 콘텐츠 조회'), +('content.update', '콘텐츠 수정'), +('content.delete', '콘텐츠 삭제'), +('content.publish', '콘텐츠 발행'), +('content.approve', '콘텐츠 승인'), +('content.reject', '콘텐츠 거절'), + +-- AI 모델 관리 +('ai.models.read', 'AI 모델 조회'), +('ai.models.create', 'AI 모델 생성'), +('ai.models.update', 'AI 모델 수정'), +('ai.models.delete', 'AI 모델 삭제'), +('ai.models.train', 'AI 모델 학습'), +('ai.models.deploy', 'AI 모델 배포'), + +-- 워크플로우 관리 +('workflows.create', '워크플로우 생성'), +('workflows.read', '워크플로우 조회'), +('workflows.update', '워크플로우 수정'), +('workflows.delete', '워크플로우 삭제'), +('workflows.execute', '워크플로우 실행'), +('workflows.schedule', '워크플로우 스케줄링'), + +-- 캠페인 관리 +('campaigns.create', '캠페인 생성'), 
+('campaigns.read', '캠페인 조회'), +('campaigns.update', '캠페인 수정'), +('campaigns.delete', '캠페인 삭제'), +('campaigns.execute', '캠페인 실행'), + +-- 시스템 관리 +('system.health', '시스템 상태 조회'), +('system.logs', '시스템 로그 조회'), +('system.backup', '시스템 백업'), +('system.config', '시스템 설정 관리'); + +-- 5. icebang 역할들 +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES +-- 글로벌 관리자 역할 +(NULL, 'SUPER_ADMIN', '전체 시스템 관리자 - 모든 권한'), +(NULL, 'ORG_ADMIN', '조직 관리자 - 조직별 모든 권한'), + +-- icebang 전용 역할들 +(1, 'AI_ENGINEER', 'AI 개발자 - AI 모델 관리 및 워크플로우'), +(1, 'DATA_SCIENTIST', '데이터 과학자 - 트렌드 분석 및 데이터 관리'), +(1, 'CONTENT_MANAGER', '콘텐츠 매니저 - 콘텐츠 생성 및 관리'), +(1, 'MARKETING_SPECIALIST', '마케팅 전문가 - 캠페인 관리'), +(1, 'WORKFLOW_ADMIN', '워크플로우 관리자 - 워크플로우 전체 관리'), +(1, 'CRAWLER_OPERATOR', '크롤링 운영자 - 크롤링 작업 관리'), +(1, 'BASIC_USER', '기본 사용자 - 기본 조회 권한'); + +-- 6. icebang 직원들 +INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES +('김아이스', 'ice.kim@icebang.site', '$2a$10$encrypted_password_hash1', 'ACTIVE'), +('박방방', 'bang.park@icebang.site', '$2a$10$encrypted_password_hash2', 'ACTIVE'), +('이트렌드', 'trend.lee@icebang.site', '$2a$10$encrypted_password_hash3', 'ACTIVE'), +('정바이럴', 'viral.jung@icebang.site', '$2a$10$encrypted_password_hash4', 'ACTIVE'), +('최콘텐츠', 'content.choi@icebang.site', '$2a$10$encrypted_password_hash5', 'ACTIVE'), +('홍크롤러', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), +('서데이터', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), +('윤워크플로우', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), +('시스템관리자', 'admin@icebang.site', '$2a$10$encrypted_password_hash9', 'ACTIVE'); + +-- 7. 
icebang 직원들의 조직 소속 정보 (하드코딩된 ID 사용) +INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES +-- 김아이스(CEO) - 기획팀 +(1, 1, 1, 6, 'PLN25001', 'ACTIVE'), +-- 박방방(CTO) - AI개발팀 +(2, 1, 2, 1, 'AI25001', 'ACTIVE'), +-- 이트렌드(팀장) - 데이터팀 +(3, 1, 3, 2, 'DAT25001', 'ACTIVE'), +-- 정바이럴(팀장) - 콘텐츠팀 +(4, 1, 3, 3, 'CON25001', 'ACTIVE'), +-- 최콘텐츠(시니어) - 콘텐츠팀 +(5, 1, 4, 3, 'CON25002', 'ACTIVE'), +-- 홍크롤러(시니어) - AI개발팀 +(6, 1, 4, 1, 'AI25002', 'ACTIVE'), +-- 서데이터(시니어) - 데이터팀 +(7, 1, 4, 2, 'DAT25002', 'ACTIVE'), +-- 윤워크플로우(팀장) - 운영팀 +(8, 1, 3, 5, 'OPS25001', 'ACTIVE'), +-- 시스템관리자(CTO) - 운영팀 +(9, 1, 2, 5, 'OPS25000', 'ACTIVE'); + +-- 8. 역할별 권한 설정 + +-- SUPER_ADMIN - 모든 권한 (전역) +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 1, id +FROM permission; + +-- ORG_ADMIN - 조직 관련 모든 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 2, id +FROM permission +WHERE resource NOT LIKE 'system.%'; + +-- AI_ENGINEER - AI 및 워크플로우 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 3, id +FROM permission +WHERE resource LIKE 'ai.%' + OR resource LIKE 'workflows.%' + OR resource LIKE 'crawling.%' + OR resource IN ('content.read', 'trends.read'); + +-- DATA_SCIENTIST - 데이터 및 분석 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 4, id +FROM permission +WHERE resource LIKE 'trends.%' + OR resource LIKE 'crawling.%' + OR resource LIKE 'ai.models.read' + OR resource IN ('content.read', 'workflows.read'); + +-- CONTENT_MANAGER - 콘텐츠 관리 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 5, id +FROM permission +WHERE resource LIKE 'content.%' + OR resource LIKE 'campaigns.%' + OR resource IN ('trends.read', 'workflows.read'); + +-- MARKETING_SPECIALIST - 마케팅 및 캠페인 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 6, id +FROM permission +WHERE resource LIKE 'campaigns.%' + OR resource IN ('content.read', 'trends.read', 
'users.read'); + +-- WORKFLOW_ADMIN - 워크플로우 전체 관리 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 7, id +FROM permission +WHERE resource LIKE 'workflows.%' + OR resource LIKE 'ai.%' + OR resource LIKE 'crawling.%' + OR resource LIKE 'system.%' + OR resource IN ('content.read', 'trends.read'); + +-- 9. icebang 직원별 역할 할당 + +-- 김아이스(CEO) - ORG_ADMIN +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (2, 1); + +-- 박방방(CTO) - AI_ENGINEER + WORKFLOW_ADMIN +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (3, 2), (7, 2); + +-- 정바이럴(콘텐츠팀장) - CONTENT_MANAGER +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (5, 4); + +-- 이트렌드(데이터팀장) - DATA_SCIENTIST +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (4, 3); + +-- 시스템관리자 - SUPER_ADMIN +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (1, 9); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql b/apps/user-service/src/main/resources/sql/data/01-insert-internal-users.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/01-insert-internal-users.sql rename to apps/user-service/src/main/resources/sql/data/01-insert-internal-users.sql diff --git a/apps/user-service/src/main/resources/sql/02-insert-external-users.sql b/apps/user-service/src/main/resources/sql/data/02-insert-external-users.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/02-insert-external-users.sql rename to apps/user-service/src/main/resources/sql/data/02-insert-external-users.sql diff --git a/apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql b/apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql new file mode 100644 index 00000000..a4d4129b --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql @@ -0,0 +1,110 @@ +-- 
=================================================================== +-- 워크플로우 관련 데이터 초기화 (H2 전용) +-- =================================================================== +-- 참조 관계 역순으로 데이터 삭제 +DELETE FROM `schedule`; +DELETE FROM `job_task`; +DELETE FROM `workflow_job`; +DELETE FROM `task`; +DELETE FROM `job`; +DELETE FROM `workflow`; + +-- =================================================================== +-- 워크플로우 정적 데이터 삽입 +-- =================================================================== + +-- 워크플로우 생성 (ID: 1) - H2에서는 NOW() 사용 +INSERT INTO `workflow` (`id`, `name`, `description`, `created_by`, `default_config`) VALUES + (1, '상품 분석 및 블로그 자동 발행', '키워드 검색부터 상품 분석 후 블로그 발행까지의 자동화 프로세스', 1, + JSON_OBJECT('1',json_object('tag','naver'),'9',json_object('tag','blogger','blog_id', '', 'blog_pw', ''))) +ON DUPLICATE KEY UPDATE + name = VALUES(name), + description = VALUES(description), + updated_at = NOW(); +-- Job 생성 (ID: 1, 2) - H2에서는 NOW() 사용 +INSERT INTO `job` (`id`, `name`, `description`, `created_by`) VALUES + (1, '상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', 1), + (2, '블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', 1) + ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), updated_at = NOW(); + +-- Task 생성 (ID: 1 ~ 9) - H2에서는 NOW() 사용 +INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES + (1, '키워드 검색 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/keywords/search', 'method', 'POST', + 'body', JSON_OBJECT('tag', 'String') -- { "tag": str } + )), + (2, '상품 검색 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/search', 'method', 'POST', + 'body', JSON_OBJECT('keyword', 'String') -- { "keyword": str } + )), + (3, '상품 매칭 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/match', 'method', 'POST', + 'body', JSON_OBJECT( -- { keyword: str, search_results: List } + 'keyword', 'String', + 'search_results', 'List' + ) + )), + (4, '상품 유사도 분석 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/similarity', 'method', 'POST', + 
'body', JSON_OBJECT( -- { keyword: str, matched_products: List, search_results: List } + 'keyword', 'String', + 'matched_products', 'List', + 'search_results', 'List' + ) + )), + (5, '상품 정보 크롤링 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/crawl', 'method', 'POST', + 'body', JSON_OBJECT('product_urls', 'List') -- { "product_urls": List[str] } 수정됨 + )), + (6, 'S3 업로드 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/s3-upload', 'method', 'POST', + 'body', JSON_OBJECT( -- { keyword: str, crawled_products: List, base_folder: str } + 'keyword', 'String', + 'crawled_products', 'List', + 'base_folder', 'String' + ) + )), + (7, '상품 선택 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/select', 'method', 'POST', + 'body', JSON_OBJECT( -- { task_run_id: int, selection_criteria: str } + 'task_run_id', 'Integer', + 'selection_criteria', 'String' + ) + )), + -- RAG관련 request body는 추후에 결정될 예정 + (8, '블로그 RAG 생성 태스크', 'FastAPI', JSON_OBJECT('endpoint', '/blogs/rag/create', 'method', 'POST')), + (9, '블로그 발행 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/blogs/publish', 'method', 'POST', + 'body', JSON_OBJECT( -- { tag: str, blog_id: str, ... 
} + 'tag', 'String', + 'blog_id', 'String', + 'blog_pw', 'String', + 'blog_name', 'String', + 'post_title', 'String', + 'post_content', 'String', + 'post_tags', 'List' + ) + )) + ON DUPLICATE KEY UPDATE name = VALUES(name), type = VALUES(type), parameters = VALUES(parameters), updated_at = NOW(); + +-- =================================================================== +-- 워크플로우 구조 및 스케줄 데이터 삽입 +-- =================================================================== +-- 워크플로우-Job 연결 +INSERT INTO `workflow_job` (`workflow_id`, `job_id`, `execution_order`) VALUES + (1, 1, 1), + (1, 2, 2) + ON DUPLICATE KEY UPDATE execution_order = VALUES(execution_order); + +-- Job-Task 연결 +INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES + -- Job 1: 상품 분석 (키워드검색 → 상품검색 → 매칭 → 유사도 → 크롤링 → S3업로드 → 상품선택) + (1, 1, 1), (1, 2, 2), (1, 3, 3), (1, 4, 4), (1, 5, 5), (1, 6, 6), (1, 7, 7), + -- Job 2: 블로그 콘텐츠 생성 (RAG생성 → 발행) + (2, 8, 1), (2, 9, 2) + ON DUPLICATE KEY UPDATE execution_order = VALUES(execution_order); + +-- 스케줄 설정 (매일 오전 8시) - H2에서는 NOW() 사용 +INSERT INTO `schedule` (`workflow_id`, `cron_expression`, `is_active`, `created_by`) VALUES + (1, '0 0 8 * * ?', TRUE, 1) + ON DUPLICATE KEY UPDATE cron_expression = VALUES(cron_expression), is_active = VALUES(is_active), updated_at = NOW(); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql b/apps/user-service/src/main/resources/sql/data/03-insert-workflow.sql similarity index 97% rename from apps/user-service/src/main/resources/sql/03-insert-workflow.sql rename to apps/user-service/src/main/resources/sql/data/03-insert-workflow.sql index 379140b5..e7e28042 100644 --- a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql +++ b/apps/user-service/src/main/resources/sql/data/03-insert-workflow.sql @@ -20,12 +20,12 @@ INSERT INTO `workflow` (`id`, `name`, `description`, `created_by`, `default_conf ON DUPLICATE KEY UPDATE name = VALUES(name), description = 
VALUES(description), - updated_at = NOW(); + updated_at = UTC_TIMESTAMP(); -- Job 생성 (ID: 1, 2) INSERT INTO `job` (`id`, `name`, `description`, `created_by`) VALUES (1, '상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', 1), (2, '블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', 1) - ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), updated_at = NOW(); + ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), updated_at = UTC_TIMESTAMP(); -- Task 생성 (ID: 1 ~ 9) INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES @@ -85,7 +85,7 @@ INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES 'post_tags', 'List' ) )) - ON DUPLICATE KEY UPDATE name = VALUES(name), type = VALUES(type), parameters = VALUES(parameters), updated_at = NOW(); + ON DUPLICATE KEY UPDATE name = VALUES(name), type = VALUES(type), parameters = VALUES(parameters), updated_at = UTC_TIMESTAMP(); -- =================================================================== -- 워크플로우 구조 및 스케줄 데이터 삽입 @@ -107,4 +107,4 @@ INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES -- 스케줄 설정 (매일 오전 8시) INSERT INTO `schedule` (`workflow_id`, `cron_expression`, `is_active`, `created_by`) VALUES (1, '0 0 8 * * ?', TRUE, 1) - ON DUPLICATE KEY UPDATE cron_expression = VALUES(cron_expression), is_active = VALUES(is_active), updated_at = NOW(); \ No newline at end of file + ON DUPLICATE KEY UPDATE cron_expression = VALUES(cron_expression), is_active = VALUES(is_active), updated_at = UTC_TIMESTAMP(); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql new file mode 100644 index 00000000..fbff73da --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql @@ -0,0 +1,76 @@ +-- =================================================================== +-- 워크플로우 히스토리 테스트용 데이터 삽입 
(H2 전용) +-- =================================================================== + +-- 기존 실행 데이터 삭제 (참조 순서 고려) +DELETE FROM `task_run` WHERE id = 1; +DELETE FROM `job_run` WHERE id = 1; +DELETE FROM `workflow_run` WHERE id = 1; + +-- AUTO_INCREMENT 초기화 +ALTER TABLE `task_run` AUTO_INCREMENT = 1; +ALTER TABLE `job_run` AUTO_INCREMENT = 1; +ALTER TABLE `workflow_run` AUTO_INCREMENT = 1; + +-- 워크플로우 실행 데이터 삽입 (workflow_run) +INSERT INTO `workflow_run` ( + `workflow_id`, + `trace_id`, + `run_number`, + `status`, + `trigger_type`, + `started_at`, + `finished_at`, + `created_by` +) VALUES ( + 1, + '3e3c832d-b51f-48ea-95f9-98f0ae6d3413', + NULL, + 'FAILED', + NULL, + '2025-09-22 18:18:43', + '2025-09-22 18:18:44', + NULL + ); + +-- Job 실행 데이터 삽입 (job_run) - H2에서는 NOW() 사용 +INSERT INTO `job_run` ( + `id`, + `workflow_run_id`, + `job_id`, + `status`, + `execution_order`, + `started_at`, + `finished_at`, + `created_at` +) VALUES ( + 1, + 1, + 1, + 'FAILED', + NULL, + '2025-09-22 18:18:44', + '2025-09-22 18:18:44', + NOW() + ); + +-- Task 실행 데이터 삽입 (task_run) - H2에서는 NOW() 사용 +INSERT INTO `task_run` ( + `id`, + `job_run_id`, + `task_id`, + `status`, + `execution_order`, + `started_at`, + `finished_at`, + `created_at` +) VALUES ( + 1, + 1, + 1, + 'FAILED', + NULL, + '2025-09-22 18:18:44', + '2025-09-22 18:18:44', + NOW() + ); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/04-insert-workflow-history.sql b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history.sql similarity index 96% rename from apps/user-service/src/main/resources/sql/04-insert-workflow-history.sql rename to apps/user-service/src/main/resources/sql/data/04-insert-workflow-history.sql index 814c3b5b..d45f9534 100644 --- a/apps/user-service/src/main/resources/sql/04-insert-workflow-history.sql +++ b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history.sql @@ -51,7 +51,7 @@ INSERT INTO `job_run` ( NULL, '2025-09-22 18:18:44', '2025-09-22 
18:18:44', - NOW() + UTC_TIMESTAMP() ); -- Task 실행 데이터 삽입 (task_run) @@ -72,5 +72,5 @@ INSERT INTO `task_run` ( NULL, '2025-09-22 18:18:44', '2025-09-22 18:18:44', - NOW() + UTC_TIMESTAMP() ); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql new file mode 100644 index 00000000..dbdf155a --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql @@ -0,0 +1,33 @@ +-- =================================================================== +-- 기존 서버 데이터의 시간대 보정 (KST → UTC 변환) - H2 전용 +-- =================================================================== +-- 이 스크립트는 서버에 올라가 있는 기존 더미데이터들의 시간을 UTC로 변환합니다. +-- 한국시간(KST, +09:00)으로 저장된 데이터를 UTC(+00:00)로 변환 + +-- =================================================================== +-- 1. 워크플로우 실행 관련 테이블 +-- =================================================================== + +-- workflow_run 테이블 시간 보정 (H2에서는 테이블이 없을 수 있으므로 조건부 실행) +-- UPDATE `workflow_run` SET +-- started_at = CASE +-- WHEN started_at IS NOT NULL THEN DATEADD('HOUR', -9, started_at) +-- ELSE NULL +-- END, +-- finished_at = CASE +-- WHEN finished_at IS NOT NULL THEN DATEADD('HOUR', -9, finished_at) +-- ELSE NULL +-- END, +-- created_at = CASE +-- WHEN created_at IS NOT NULL THEN DATEADD('HOUR', -9, created_at) +-- ELSE NULL +-- END +-- WHERE started_at IS NOT NULL +-- OR finished_at IS NOT NULL +-- OR created_at IS NOT NULL; + +-- =================================================================== +-- 완료 메시지 +-- =================================================================== +-- 이 스크립트 실행 후 모든 시간 데이터가 UTC 기준으로 변환됩니다. +-- 애플리케이션에서 Instant를 사용하여 UTC 시간으로 처리됩니다. 
\ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql new file mode 100644 index 00000000..be6fdc57 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql @@ -0,0 +1,250 @@ +# -- =================================================================== +# -- 기존 서버 데이터의 시간대 보정 (KST → UTC 변환) +# -- =================================================================== +# -- 이 스크립트는 서버에 올라가 있는 기존 더미데이터들의 시간을 UTC로 변환합니다. +# -- 한국시간(KST, +09:00)으로 저장된 데이터를 UTC(+00:00)로 변환 +# +# -- =================================================================== +# -- 1. 워크플로우 실행 관련 테이블 +# -- =================================================================== +# +# -- workflow_run 테이블 시간 보정 +# UPDATE `workflow_run` SET +# started_at = CASE +# WHEN started_at IS NOT NULL THEN DATE_SUB(started_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# finished_at = CASE +# WHEN finished_at IS NOT NULL THEN DATE_SUB(finished_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE started_at IS NOT NULL +# OR finished_at IS NOT NULL +# OR created_at IS NOT NULL; +# +# -- job_run 테이블 시간 보정 +# UPDATE `job_run` SET +# started_at = CASE +# WHEN started_at IS NOT NULL THEN DATE_SUB(started_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# finished_at = CASE +# WHEN finished_at IS NOT NULL THEN DATE_SUB(finished_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE started_at IS NOT NULL +# OR finished_at IS NOT NULL +# OR created_at IS NOT NULL; +# +# -- task_run 테이블 시간 보정 +# UPDATE `task_run` SET +# started_at = CASE +# WHEN started_at IS NOT NULL THEN DATE_SUB(started_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# finished_at = CASE +# 
WHEN finished_at IS NOT NULL THEN DATE_SUB(finished_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE started_at IS NOT NULL +# OR finished_at IS NOT NULL +# OR created_at IS NOT NULL; +# +# -- =================================================================== +# -- 2. 마스터 데이터 테이블들 +# -- =================================================================== +# +# -- workflow 테이블 시간 보정 +# UPDATE `workflow` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- job 테이블 시간 보정 +# UPDATE `job` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- task 테이블 시간 보정 +# UPDATE `task` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- schedule 테이블 시간 보정 +# UPDATE `schedule` SET +# last_run_at = CASE +# WHEN last_run_at IS NOT NULL THEN DATE_SUB(last_run_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE last_run_at IS NOT NULL +# OR created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- 
=================================================================== +# -- 3. 사용자 관련 테이블들 +# -- =================================================================== +# +# -- user 테이블 시간 보정 +# UPDATE `user` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# joined_at = CASE +# WHEN joined_at IS NOT NULL THEN DATE_SUB(joined_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL +# OR joined_at IS NOT NULL; +# +# -- user_organization 테이블 시간 보정 +# UPDATE `user_organization` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- organization 테이블 시간 보정 +# UPDATE `organization` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- =================================================================== +# -- 4. 
기타 시스템 테이블들 +# -- =================================================================== +# +# -- permission 테이블 시간 보정 +# UPDATE `permission` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- execution_log 테이블 시간 보정 +# UPDATE `execution_log` SET +# executed_at = CASE +# WHEN executed_at IS NOT NULL THEN DATE_SUB(executed_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# reserved5 = CASE +# WHEN reserved5 IS NOT NULL THEN DATE_SUB(reserved5, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE executed_at IS NOT NULL +# OR reserved5 IS NOT NULL; +# +# -- task_io_data 테이블 시간 보정 +# UPDATE `task_io_data` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL; +# +# -- config 테이블 시간 보정 +# UPDATE `config` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL; +# +# -- category 테이블 시간 보정 +# UPDATE `category` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- user_config 테이블 시간 보정 +# UPDATE `user_config` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- =================================================================== +# -- 완료 메시지 +# -- 
=================================================================== +# -- 이 스크립트 실행 후 모든 시간 데이터가 UTC 기준으로 변환됩니다. +# -- 애플리케이션에서 Instant를 사용하여 UTC 시간으로 처리됩니다. \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/00-drop-h2.sql b/apps/user-service/src/main/resources/sql/schema/00-drop-h2.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/00-drop-h2.sql rename to apps/user-service/src/main/resources/sql/schema/00-drop-h2.sql diff --git a/apps/user-service/src/main/resources/sql/00-drop-maria.sql b/apps/user-service/src/main/resources/sql/schema/00-drop-maria.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/00-drop-maria.sql rename to apps/user-service/src/main/resources/sql/schema/00-drop-maria.sql diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/schema/01-schema.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/01-schema.sql rename to apps/user-service/src/main/resources/sql/schema/01-schema.sql diff --git a/apps/user-service/src/main/resources/sql/02-quartz-schema.sql b/apps/user-service/src/main/resources/sql/schema/02-quartz-schema.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/02-quartz-schema.sql rename to apps/user-service/src/main/resources/sql/schema/02-quartz-schema.sql diff --git a/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql b/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql new file mode 100644 index 00000000..3ae6c57b --- /dev/null +++ b/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql @@ -0,0 +1,51 @@ +-- =================================================================== +-- H2 전용 UTC Timezone 처리를 위한 스키마 수정 (v0.5) +-- =================================================================== +-- H2 데이터베이스는 MariaDB와 다른 문법을 사용하므로 별도 처리 + +-- 모든 timestamp 컬럼의 기본값 제거 (H2에서는 
MODIFY COLUMN 문법이 다름) +-- H2에서는 ALTER TABLE table_name ALTER COLUMN column_name 문법 사용 +-- H2 MariaDB 모드에서는 백틱으로 테이블명을 감싸야 함 + +ALTER TABLE `permission` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `permission` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `organization` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `organization` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `user` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `user` ALTER COLUMN updated_at SET DEFAULT NULL; +ALTER TABLE `user` ALTER COLUMN joined_at SET DEFAULT NULL; + +ALTER TABLE `user_organization` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `user_organization` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `workflow` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `workflow` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `schedule` ALTER COLUMN last_run_at SET DEFAULT NULL; +ALTER TABLE `schedule` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `schedule` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `job` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `job` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `task` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `task` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `execution_log` ALTER COLUMN executed_at SET DEFAULT NULL; +ALTER TABLE `execution_log` ALTER COLUMN reserved5 SET DEFAULT NULL; + +ALTER TABLE `task_io_data` ALTER COLUMN created_at SET DEFAULT NULL; + +-- config 테이블이 존재하는지 확인 후 ALTER 실행 +-- ALTER TABLE `config` ALTER COLUMN created_at SET DEFAULT NULL; + +ALTER TABLE `category` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `category` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `user_config` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `user_config` ALTER COLUMN updated_at SET DEFAULT NULL; + +-- 워크플로우 실행 테이블들 (기본값이 이미 NULL이므로 변경 불필요) +-- workflow_run, job_run, 
task_run 테이블은 이미 DEFAULT 값이 없음 \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql b/apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql new file mode 100644 index 00000000..23f7f112 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql @@ -0,0 +1,49 @@ +-- =================================================================== +-- MariaDB 전용 UTC Timezone 처리를 위한 스키마 수정 (v0.5) +-- =================================================================== +-- MariaDB에서는 UTC_TIMESTAMP() 함수를 사용할 수 있지만, +-- 애플리케이션에서 Instant로 처리하므로 기본값을 제거 + +-- 모든 timestamp 컬럼의 기본값을 UTC 기준으로 변경 +ALTER TABLE `permission` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `permission` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `organization` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `organization` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `user` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `user` MODIFY COLUMN updated_at timestamp NULL; +ALTER TABLE `user` MODIFY COLUMN joined_at timestamp NULL; + +ALTER TABLE `user_organization` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `user_organization` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `workflow` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `workflow` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `schedule` MODIFY COLUMN last_run_at timestamp NULL; +ALTER TABLE `schedule` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `schedule` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `job` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `job` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `task` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `task` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `execution_log` MODIFY COLUMN executed_at timestamp NULL; +ALTER TABLE `execution_log` 
MODIFY COLUMN reserved5 timestamp NULL; + +ALTER TABLE `task_io_data` MODIFY COLUMN created_at timestamp NULL; + +-- config 테이블이 존재하지 않아 ALTER 실행 불가 +-- ALTER TABLE `config` MODIFY COLUMN created_at timestamp NULL; + +ALTER TABLE `category` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `category` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `user_config` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `user_config` MODIFY COLUMN updated_at timestamp NULL; + +-- 워크플로우 실행 테이블 (이미 DEFAULT 값이 없으므로 변경 불필요) +-- workflow_run, job_run, task_run 테이블들은 기본값이 이미 적절히 설정됨 \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java index 67e6820a..636b3455 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java @@ -15,7 +15,10 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + value = { + "classpath:sql/data/00-truncate.sql", + "classpath:sql/data/01-insert-internal-users.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("사용자 로그아웃 플로우 E2E 테스트") @E2eTest diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java index 1bc1903b..fd3eee60 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -15,7 +15,10 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + value = 
{ + "classpath:sql/data/00-truncate.sql", + "classpath:sql/data/01-insert-internal-users.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("사용자 등록 플로우 E2E 테스트") class UserRegistrationFlowE2eTest extends E2eTestSupport { diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java index 115bec64..3d5ca4b8 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java @@ -2,6 +2,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import java.time.Instant; import java.util.HashMap; import java.util.Map; @@ -14,7 +15,10 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + value = { + "classpath:sql/data/00-truncate.sql", + "classpath:sql/data/01-insert-internal-users.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("워크플로우 생성 플로우 E2E 테스트") @E2eTest @@ -216,4 +220,80 @@ private void performUserLogin() { logSuccess("사용자 로그인 완료"); } + + @Test + @DisplayName("워크플로우 생성 시 UTC 시간 기반으로 생성 시간이 저장되는지 검증") + void createWorkflow_utc_time_validation() throws Exception { + logStep(1, "사용자 로그인"); + performUserLogin(); + + logStep(2, "워크플로우 생성 전 현재 시간 기록 (UTC 기준)"); + Instant beforeCreate = Instant.now(); + + logStep(3, "워크플로우 생성"); + Map workflowRequest = new HashMap<>(); + workflowRequest.put("name", "UTC 시간 검증 워크플로우"); + workflowRequest.put("description", "UTC 시간대 보장을 위한 테스트 워크플로우"); + workflowRequest.put("search_platform", "naver"); + workflowRequest.put("is_enabled", true); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(workflowRequest, headers); + + 
ResponseEntity createResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), entity, Map.class); + + assertThat(createResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) createResponse.getBody().get("success")).isTrue(); + + logStep(4, "생성 직후 시간 기록 (UTC 기준)"); + Instant afterCreate = Instant.now(); + + logStep(5, "생성된 워크플로우 목록 조회하여 시간 검증"); + ResponseEntity listResponse = + restTemplate.getForEntity(getV0ApiUrl("/workflows"), Map.class); + + assertThat(listResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) listResponse.getBody().get("success")).isTrue(); + + @SuppressWarnings("unchecked") + Map data = (Map) listResponse.getBody().get("data"); + + logDebug("API 응답 구조: " + data); + + @SuppressWarnings("unchecked") + java.util.List> workflows = + (java.util.List>) data.get("data"); + + assertThat(workflows).isNotNull(); + + // 생성된 워크플로우 찾기 + Map createdWorkflow = + workflows.stream() + .filter(w -> "UTC 시간 검증 워크플로우".equals(w.get("name"))) + .findFirst() + .orElse(null); + + assertThat(createdWorkflow).isNotNull(); + + // createdAt 검증 - UTC 시간 범위 내에 있는지 확인 + String createdAtStr = (String) createdWorkflow.get("createdAt"); + assertThat(createdAtStr).isNotNull(); + // UTC ISO-8601 형식 검증 (예: 2025-09-25T04:48:40Z) + assertThat(createdAtStr).matches("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z"); + + logSuccess("워크플로우가 UTC 시간 기준으로 생성됨을 확인"); + + // 생성 시간이 beforeCreate와 afterCreate 사이에 있는지 검증 (시간대 무관하게 UTC 기준) + logStep(6, "생성 시간이 예상 범위 내에 있는지 검증"); + + // 실제로 생성 시간과 현재 시간의 차이가 합리적인 범위(예: 10초) 내에 있는지 확인 + // 이는 시스템 시간대에 관계없이 UTC 기반으로 일관되게 작동함을 보여줌 + logDebug("생성 시간: " + createdAtStr); + logDebug("현재 UTC 시간: " + Instant.now()); + + logCompletion("UTC 시간 기반 워크플로우 생성 검증 완료"); + } } diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java index dd5e0d1a..3b7ce243 100644 --- 
a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java @@ -48,7 +48,7 @@ GenericContainer lokiContainer(Network network) { static void configureProperties( DynamicPropertyRegistry registry, MariaDBContainer mariadb, GenericContainer loki) { // MariaDB 연결 설정 - registry.add("spring.datasource.url", mariadb::getJdbcUrl); + registry.add("spring.datasource.url", () -> mariadb.getJdbcUrl() + "?serverTimezone=UTC"); registry.add("spring.datasource.username", mariadb::getUsername); registry.add("spring.datasource.password", mariadb::getPassword); registry.add("spring.datasource.driver-class-name", () -> "org.mariadb.jdbc.Driver"); diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java index 276ce7c8..333fb55d 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java @@ -27,7 +27,7 @@ import site.icebang.integration.setup.support.IntegrationTestSupport; @Sql( - value = "classpath:sql/01-insert-internal-users.sql", + value = "classpath:sql/data/01-insert-internal-users.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) @Transactional class AuthApiIntegrationTest extends IntegrationTestSupport { diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java index 666a8ea5..44ffd1b4 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java +++ 
b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java @@ -22,8 +22,8 @@ @Sql( value = { - "classpath:sql/01-insert-internal-users.sql", - "classpath:sql/02-insert-external-users.sql" + "classpath:sql/data/01-insert-internal-users.sql", + "classpath:sql/data/02-insert-external-users.sql" }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) @Transactional diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java index 4703e9f6..f2be6c1f 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java @@ -2,6 +2,7 @@ import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.hamcrest.Matchers.matchesPattern; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; import static org.springframework.restdocs.payload.PayloadDocumentation.*; @@ -20,9 +21,9 @@ @Sql( value = { - "classpath:sql/01-insert-internal-users.sql", - "classpath:sql/03-insert-workflow.sql", - "classpath:sql/04-insert-workflow-history.sql" + "classpath:sql/data/01-insert-internal-users.sql", + "classpath:sql/data/03-insert-workflow-h2.sql", + "classpath:sql/data/04-insert-workflow-history-h2.sql" }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) @Transactional @@ -61,6 +62,16 @@ void getWorkflowRunDetail_success() throws Exception { .andExpect(jsonPath("$.data.workflowRun.durationMs").value(1000)) .andExpect(jsonPath("$.data.workflowRun.createdBy").isEmpty()) 
.andExpect(jsonPath("$.data.workflowRun.createdAt").exists()) + // UTC 시간 형식 검증 (시간대 보장) - 마이크로초 포함 가능 + .andExpect( + jsonPath("$.data.workflowRun.startedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.workflowRun.finishedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.workflowRun.createdAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) // jobRuns 배열 확인 .andExpect(jsonPath("$.data.jobRuns").isArray()) .andExpect(jsonPath("$.data.jobRuns.length()").value(1)) @@ -75,6 +86,13 @@ void getWorkflowRunDetail_success() throws Exception { .andExpect(jsonPath("$.data.jobRuns[0].startedAt").value("2025-09-22 18:18:44")) .andExpect(jsonPath("$.data.jobRuns[0].finishedAt").value("2025-09-22 18:18:44")) .andExpect(jsonPath("$.data.jobRuns[0].durationMs").value(0)) + // JobRun UTC 시간 형식 검증 - 마이크로초 포함 가능 + .andExpect( + jsonPath("$.data.jobRuns[0].startedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.jobRuns[0].finishedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) // taskRuns 배열 확인 .andExpect(jsonPath("$.data.jobRuns[0].taskRuns").isArray()) .andExpect(jsonPath("$.data.jobRuns[0].taskRuns.length()").value(1)) @@ -91,6 +109,13 @@ void getWorkflowRunDetail_success() throws Exception { .andExpect( jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt").value("2025-09-22 18:18:44")) .andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].durationMs").value(0)) + // TaskRun UTC 시간 형식 검증 - 마이크로초 포함 가능 + .andExpect( + jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) .andDo( 
document( "workflow-run-detail", @@ -225,4 +250,48 @@ void getWorkflowRunDetail_success() throws Exception { .description("HTTP 상태")) .build()))); } + + @Test + @DisplayName("워크플로우 실행 시간이 UTC 기준으로 일관되게 저장되는지 검증") + @WithUserDetails("admin@icebang.site") + void getWorkflowRunDetail_utc_time_validation() throws Exception { + // given + Long runId = 1L; + + // when & then - UTC 시간 형식 및 시간 순서 검증 + mockMvc + .perform( + get(getApiUrlForDocs("/v0/workflow-runs/{runId}"), runId) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + // WorkflowRun 시간이 UTC 형식인지 검증 - 마이크로초 포함 가능 + .andExpect( + jsonPath("$.data.workflowRun.startedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.workflowRun.finishedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.workflowRun.createdAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + // JobRun 시간이 UTC 형식인지 검증 - 마이크로초 포함 가능 + .andExpect( + jsonPath("$.data.jobRuns[0].startedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.jobRuns[0].finishedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + // TaskRun 시간이 UTC 형식인지 검증 - 마이크로초 포함 가능 + .andExpect( + jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .andExpect( + jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt") + .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + // 시간 순서 논리적 검증 (startedAt <= finishedAt) + .andExpect(jsonPath("$.data.workflowRun.startedAt").value("2025-09-22 18:18:43")) + .andExpect(jsonPath("$.data.workflowRun.finishedAt").value("2025-09-22 18:18:44")); + } } 
diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java index 2daa4db1..23c4eaa4 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java @@ -22,7 +22,10 @@ import site.icebang.integration.setup.support.IntegrationTestSupport; @Sql( - value = {"classpath:sql/01-insert-internal-users.sql", "classpath:sql/03-insert-workflow.sql"}, + value = { + "classpath:sql/data/01-insert-internal-users.sql", + "classpath:sql/data/03-insert-workflow-h2.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) @Transactional public class WorkflowRunApiIntegrationTest extends IntegrationTestSupport { From 9e4aa3185eabce358918fb37d1bb64b921f7cba1 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Fri, 26 Sep 2025 20:03:44 +0900 Subject: [PATCH 12/18] =?UTF-8?q?Jackson=20Timezone=20=EC=A7=81=EB=A0=AC?= =?UTF-8?q?=ED=99=94=EA=B0=80=20=EB=90=98=EC=A7=80=20=EC=95=8A=EB=8D=98=20?= =?UTF-8?q?=EB=AC=B8=EC=A0=9C=20=ED=95=B4=EA=B2=B0=20(#212)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: ObjectMapper timezone 설정 * fix: Workflow 관련 timezone이 찍히지 않는 문제 * fix: ObjectMapper UTC 설정 bean config로 변경 application.yml 설정이 무시됨 * fix: ExecutionLogDto instant로 변환 * test: DB가 UTC 기준으로 저장되어있다고 가정하도록 수정 * test: SQL 09:18 -> 18:18로 일치 * test: H2 DB UTC 고정 --- .../domain/workflow/dto/ExecutionLogDto.java | 4 +- .../domain/workflow/dto/JobRunDto.java | 5 +- .../domain/workflow/dto/TaskRunDto.java | 6 ++- .../domain/workflow/dto/WorkflowRunDto.java | 8 +-- .../site/icebang/global/config/WebConfig.java | 29 +++++++++++ .../src/main/resources/application.yml | 5 -- .../sql/schema/03-schema-h2-timezone.sql | 2 +- 
.../setup/config/E2eTestConfiguration.java | 5 -- .../e2e/setup/support/E2eTestSupport.java | 3 +- .../setup/config/RestDocsConfiguration.java | 7 --- .../WorkflowHistoryApiIntegrationTest.java | 52 ++++++++++--------- 11 files changed, 75 insertions(+), 51 deletions(-) diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java index 5dbb5711..7c8595a3 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java @@ -1,5 +1,7 @@ package site.icebang.domain.workflow.dto; +import java.time.Instant; + import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; @@ -17,6 +19,6 @@ public class ExecutionLogDto { private String logLevel; // info, success, warning, error private String status; // running, success, failed, etc private String logMessage; - private String executedAt; + private Instant executedAt; private Integer durationMs; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobRunDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobRunDto.java index 618a6214..8ebe6c51 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobRunDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobRunDto.java @@ -1,5 +1,6 @@ package site.icebang.domain.workflow.dto; +import java.time.Instant; import java.util.List; import lombok.AllArgsConstructor; @@ -19,8 +20,8 @@ public class JobRunDto { private String jobDescription; private String status; private Integer executionOrder; - private String startedAt; - private String finishedAt; + private Instant startedAt; + private Instant finishedAt; private Integer durationMs; private List taskRuns; } diff --git 
a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskRunDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskRunDto.java index 9005c45a..b6bc9a3d 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskRunDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskRunDto.java @@ -1,5 +1,7 @@ package site.icebang.domain.workflow.dto; +import java.time.Instant; + import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; @@ -18,7 +20,7 @@ public class TaskRunDto { private String taskType; private String status; private Integer executionOrder; - private String startedAt; - private String finishedAt; + private Instant startedAt; + private Instant finishedAt; private Integer durationMs; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowRunDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowRunDto.java index 20b8ecd2..af2a3005 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowRunDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowRunDto.java @@ -1,5 +1,7 @@ package site.icebang.domain.workflow.dto; +import java.time.Instant; + import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; @@ -17,9 +19,9 @@ public class WorkflowRunDto { private String runNumber; private String status; private String triggerType; - private String startedAt; - private String finishedAt; + private Instant startedAt; + private Instant finishedAt; private Integer durationMs; private Long createdBy; - private String createdAt; + private Instant createdAt; } diff --git a/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java b/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java index 7029b7d9..9369f887 100644 --- a/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java +++ 
b/apps/user-service/src/main/java/site/icebang/global/config/WebConfig.java @@ -1,13 +1,19 @@ package site.icebang.global.config; import java.time.Duration; +import java.util.TimeZone; import org.springframework.boot.web.client.RestTemplateBuilder; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; import org.springframework.http.client.SimpleClientHttpRequestFactory; import org.springframework.web.client.RestTemplate; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; + /** * 애플리케이션의 웹 관련 설정을 담당하는 Java 기반 설정 클래스입니다. * @@ -51,4 +57,27 @@ public RestTemplate restTemplate(RestTemplateBuilder builder) { // 3. 빌더에 직접 생성한 requestFactory를 설정 return builder.requestFactory(() -> requestFactory).build(); } + + /** + * Z 포함 UTC 형식으로 시간을 직렬화하는 ObjectMapper 빈을 생성합니다. + * + *

이 ObjectMapper는 애플리케이션 전역에서 사용되며, 다음과 같은 설정을 적용합니다: + * + *

    + *
  • JavaTimeModule 등록으로 Java 8 시간 API 지원 + *
  • timestamps 대신 ISO 8601 문자열 형식 사용 + *
  • UTC 타임존 설정으로 Z 포함 형식 보장 + *
+ * + * @return Z 포함 UTC 형식이 설정된 ObjectMapper 인스턴스 + * @since v0.0.1 + */ + @Bean + @Primary + public ObjectMapper objectMapper() { + return new ObjectMapper() + .registerModule(new JavaTimeModule()) + .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) + .setTimeZone(TimeZone.getTimeZone("UTC")); + } } diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index 55fece16..f6302bc7 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -7,11 +7,6 @@ spring: context: cache: maxSize: 1 - jackson: - time-zone: UTC - serialization: - write-dates-as-timestamps: false - mybatis: # Mapper XML 파일 위치 mapper-locations: classpath:mapper/**/*.xml diff --git a/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql b/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql index 3ae6c57b..018b4d18 100644 --- a/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql +++ b/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql @@ -6,7 +6,7 @@ -- 모든 timestamp 컬럼의 기본값 제거 (H2에서는 MODIFY COLUMN 문법이 다름) -- H2에서는 ALTER TABLE table_name ALTER COLUMN column_name 문법 사용 -- H2 MariaDB 모드에서는 백틱으로 테이블명을 감싸야 함 - +SET TIME ZONE 'UTC'; ALTER TABLE `permission` ALTER COLUMN created_at SET DEFAULT NULL; ALTER TABLE `permission` ALTER COLUMN updated_at SET DEFAULT NULL; diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java index 3b7ce243..c7b18ce8 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java @@ -9,15 +9,10 @@ import org.testcontainers.containers.MariaDBContainer; import 
org.testcontainers.containers.Network; import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; import org.testcontainers.utility.DockerImageName; @TestConfiguration(proxyBeanMethods = false) public class E2eTestConfiguration { - @Bean - public ObjectMapper objectMapper() { - return new ObjectMapper(); - } @Bean public Network testNetwork() { diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java index 97d1cf0d..002cd307 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/support/E2eTestSupport.java @@ -12,7 +12,8 @@ import org.springframework.http.client.ClientHttpResponse; import org.springframework.test.web.servlet.MockMvc; import org.springframework.web.context.WebApplicationContext; -import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; + +import com.fasterxml.jackson.databind.ObjectMapper; import jakarta.annotation.PostConstruct; diff --git a/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java b/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java index f60de9cc..16285140 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java +++ b/apps/user-service/src/test/java/site/icebang/integration/setup/config/RestDocsConfiguration.java @@ -6,8 +6,6 @@ import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler; import org.springframework.restdocs.operation.preprocess.Preprocessors; -import com.fasterxml.jackson.databind.ObjectMapper; - @TestConfiguration public class RestDocsConfiguration { @@ -21,9 +19,4 @@ public RestDocumentationResultHandler restDocumentationResultHandler() { 
Preprocessors.removeHeaders("Content-Length", "Date", "Keep-Alive", "Connection"), Preprocessors.prettyPrint())); } - - @Bean - public ObjectMapper testObjectMapper() { - return new ObjectMapper(); - } } diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java index f2be6c1f..f83e0142 100644 --- a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java @@ -57,21 +57,21 @@ void getWorkflowRunDetail_success() throws Exception { .andExpect(jsonPath("$.data.workflowRun.runNumber").isEmpty()) .andExpect(jsonPath("$.data.workflowRun.status").value("FAILED")) .andExpect(jsonPath("$.data.workflowRun.triggerType").isEmpty()) - .andExpect(jsonPath("$.data.workflowRun.startedAt").value("2025-09-22 18:18:43")) - .andExpect(jsonPath("$.data.workflowRun.finishedAt").value("2025-09-22 18:18:44")) + .andExpect(jsonPath("$.data.workflowRun.startedAt").value("2025-09-22T18:18:43Z")) + .andExpect(jsonPath("$.data.workflowRun.finishedAt").value("2025-09-22T18:18:44Z")) .andExpect(jsonPath("$.data.workflowRun.durationMs").value(1000)) .andExpect(jsonPath("$.data.workflowRun.createdBy").isEmpty()) .andExpect(jsonPath("$.data.workflowRun.createdAt").exists()) // UTC 시간 형식 검증 (시간대 보장) - 마이크로초 포함 가능 .andExpect( jsonPath("$.data.workflowRun.startedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( jsonPath("$.data.workflowRun.finishedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( 
jsonPath("$.data.workflowRun.createdAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) // jobRuns 배열 확인 .andExpect(jsonPath("$.data.jobRuns").isArray()) .andExpect(jsonPath("$.data.jobRuns.length()").value(1)) @@ -83,16 +83,19 @@ void getWorkflowRunDetail_success() throws Exception { .andExpect(jsonPath("$.data.jobRuns[0].jobDescription").value("키워드 검색, 상품 크롤링 및 유사도 분석 작업")) .andExpect(jsonPath("$.data.jobRuns[0].status").value("FAILED")) .andExpect(jsonPath("$.data.jobRuns[0].executionOrder").isEmpty()) - .andExpect(jsonPath("$.data.jobRuns[0].startedAt").value("2025-09-22 18:18:44")) - .andExpect(jsonPath("$.data.jobRuns[0].finishedAt").value("2025-09-22 18:18:44")) + .andExpect(jsonPath("$.data.jobRuns[0].startedAt").value("2025-09-22T18:18:44Z")) + .andExpect(jsonPath("$.data.jobRuns[0].finishedAt").value("2025-09-22T18:18:44Z")) .andExpect(jsonPath("$.data.jobRuns[0].durationMs").value(0)) // JobRun UTC 시간 형식 검증 - 마이크로초 포함 가능 .andExpect( - jsonPath("$.data.jobRuns[0].startedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + jsonPath( + "$.data.jobRuns[0].startedAt", + matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) + // finishedAt 도 동일하게 .andExpect( - jsonPath("$.data.jobRuns[0].finishedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + jsonPath( + "$.data.jobRuns[0].finishedAt", + matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) // taskRuns 배열 확인 .andExpect(jsonPath("$.data.jobRuns[0].taskRuns").isArray()) .andExpect(jsonPath("$.data.jobRuns[0].taskRuns.length()").value(1)) @@ -105,17 +108,18 @@ void getWorkflowRunDetail_success() throws Exception { .andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].taskType").value("FastAPI")) .andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].status").value("FAILED")) 
.andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].executionOrder").isEmpty()) - .andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt").value("2025-09-22 18:18:44")) .andExpect( - jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt").value("2025-09-22 18:18:44")) + jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt").value("2025-09-22T18:18:44Z")) + .andExpect( + jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt").value("2025-09-22T18:18:44Z")) .andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].durationMs").value(0)) // TaskRun UTC 시간 형식 검증 - 마이크로초 포함 가능 .andExpect( jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andDo( document( "workflow-run-detail", @@ -269,29 +273,29 @@ void getWorkflowRunDetail_utc_time_validation() throws Exception { // WorkflowRun 시간이 UTC 형식인지 검증 - 마이크로초 포함 가능 .andExpect( jsonPath("$.data.workflowRun.startedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( jsonPath("$.data.workflowRun.finishedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( jsonPath("$.data.workflowRun.createdAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) // JobRun 시간이 UTC 형식인지 검증 - 마이크로초 포함 가능 .andExpect( jsonPath("$.data.jobRuns[0].startedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} 
\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( jsonPath("$.data.jobRuns[0].finishedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) // TaskRun 시간이 UTC 형식인지 검증 - 마이크로초 포함 가능 .andExpect( jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) .andExpect( jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt") - .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?"))) + .value(matchesPattern("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z$"))) // 시간 순서 논리적 검증 (startedAt <= finishedAt) - .andExpect(jsonPath("$.data.workflowRun.startedAt").value("2025-09-22 18:18:43")) - .andExpect(jsonPath("$.data.workflowRun.finishedAt").value("2025-09-22 18:18:44")); + .andExpect(jsonPath("$.data.workflowRun.startedAt").value("2025-09-22T18:18:43Z")) + .andExpect(jsonPath("$.data.workflowRun.finishedAt").value("2025-09-22T18:18:44Z")); } } From a905f9deb1b2e1b38e0277ad79753f94aba7687a Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Sat, 27 Sep 2025 13:09:21 +0900 Subject: [PATCH 13/18] =?UTF-8?q?ExecutionLog=20API=20=EA=B5=AC=ED=98=84?= =?UTF-8?q?=20=EB=B0=8F=20traceId=20=EC=9D=BC=EA=B4=80=EC=84=B1=20?= =?UTF-8?q?=EA=B0=9C=EC=84=A0=20(#215)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Execution log api * fix: Workflow run 시 trace id가 달라지던 버그 MDC에서 trace id를 가져오도록 설정 없다면 uuid 재발급 --- .../domain/log/mapper/ExecutionLogMapper.java | 13 ++ .../log/service/ExecutionLogService.java | 21 ++ .../controller/WorkflowHistoryController.java | 14 ++ .../domain/workflow/dto/ExecutionLogDto.java | 2 + .../dto/log/ExecutionLogSimpleDto.java | 34 +++ 
.../workflow/dto/log/ExecutionType.java | 7 + .../dto/log/TaskExecutionMessagesDto.java | 15 ++ .../dto/log/WorkflowLogQueryCriteria.java | 17 ++ .../domain/workflow/model/WorkflowRun.java | 5 +- .../mybatis/mapper/ExecutionLogMapper.xml | 55 +++++ .../sql/data/06-insert-execution-log-h2.sql | 38 ++++ .../ExecutionLogApiIntegrationTest.java | 201 ++++++++++++++++++ 12 files changed, 421 insertions(+), 1 deletion(-) create mode 100644 apps/user-service/src/main/java/site/icebang/domain/log/mapper/ExecutionLogMapper.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/log/service/ExecutionLogService.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionLogSimpleDto.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionType.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/TaskExecutionMessagesDto.java create mode 100644 apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/WorkflowLogQueryCriteria.java create mode 100644 apps/user-service/src/main/resources/mybatis/mapper/ExecutionLogMapper.xml create mode 100644 apps/user-service/src/main/resources/sql/data/06-insert-execution-log-h2.sql create mode 100644 apps/user-service/src/test/java/site/icebang/integration/tests/workflow/ExecutionLogApiIntegrationTest.java diff --git a/apps/user-service/src/main/java/site/icebang/domain/log/mapper/ExecutionLogMapper.java b/apps/user-service/src/main/java/site/icebang/domain/log/mapper/ExecutionLogMapper.java new file mode 100644 index 00000000..772c47e2 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/log/mapper/ExecutionLogMapper.java @@ -0,0 +1,13 @@ +package site.icebang.domain.log.mapper; + +import java.util.List; + +import org.apache.ibatis.annotations.Mapper; + +import site.icebang.domain.workflow.dto.ExecutionLogDto; +import 
site.icebang.domain.workflow.dto.log.WorkflowLogQueryCriteria; + +@Mapper +public interface ExecutionLogMapper { + List selectLogsByCriteria(WorkflowLogQueryCriteria criteria); +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/log/service/ExecutionLogService.java b/apps/user-service/src/main/java/site/icebang/domain/log/service/ExecutionLogService.java new file mode 100644 index 00000000..7cd9a820 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/log/service/ExecutionLogService.java @@ -0,0 +1,21 @@ +package site.icebang.domain.log.service; + +import java.util.List; + +import org.springframework.stereotype.Service; + +import lombok.RequiredArgsConstructor; + +import site.icebang.domain.log.mapper.ExecutionLogMapper; +import site.icebang.domain.workflow.dto.ExecutionLogDto; +import site.icebang.domain.workflow.dto.log.WorkflowLogQueryCriteria; + +@Service +@RequiredArgsConstructor +public class ExecutionLogService { + private final ExecutionLogMapper executionLogMapper; + + public List getRawLogs(WorkflowLogQueryCriteria criteria) { + return executionLogMapper.selectLogsByCriteria(criteria); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowHistoryController.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowHistoryController.java index 07d4f20e..0f8535cf 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowHistoryController.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/controller/WorkflowHistoryController.java @@ -1,14 +1,20 @@ package site.icebang.domain.workflow.controller; +import java.util.List; + import org.springframework.web.bind.annotation.*; +import jakarta.validation.Valid; import lombok.RequiredArgsConstructor; import site.icebang.common.dto.ApiResponse; import site.icebang.common.dto.PageParams; import site.icebang.common.dto.PageResult; +import 
site.icebang.domain.log.service.ExecutionLogService; import site.icebang.domain.workflow.dto.WorkflowHistoryDTO; import site.icebang.domain.workflow.dto.WorkflowRunDetailResponse; +import site.icebang.domain.workflow.dto.log.ExecutionLogSimpleDto; +import site.icebang.domain.workflow.dto.log.WorkflowLogQueryCriteria; import site.icebang.domain.workflow.service.WorkflowHistoryService; @RestController @@ -16,6 +22,7 @@ @RequiredArgsConstructor public class WorkflowHistoryController { private final WorkflowHistoryService workflowHistoryService; + private final ExecutionLogService executionLogService; @GetMapping("") public ApiResponse> getWorkflowHistoryList( @@ -35,4 +42,11 @@ public ApiResponse getWorkflowRunDetail(@PathVariable WorkflowRunDetailResponse response = workflowHistoryService.getWorkflowRunDetail(runId); return ApiResponse.success(response); } + + @GetMapping("/logs") + public ApiResponse> getTaskExecutionLog( + @Valid @ModelAttribute WorkflowLogQueryCriteria requestDto) { + return ApiResponse.success( + ExecutionLogSimpleDto.from(executionLogService.getRawLogs(requestDto))); + } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java index 7c8595a3..cbe6b2f7 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ExecutionLogDto.java @@ -21,4 +21,6 @@ public class ExecutionLogDto { private String logMessage; private Instant executedAt; private Integer durationMs; + private String traceId; + private String errorCode; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionLogSimpleDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionLogSimpleDto.java new file mode 100644 index 00000000..152de8e4 --- /dev/null +++ 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionLogSimpleDto.java @@ -0,0 +1,34 @@ +package site.icebang.domain.workflow.dto.log; + +import java.time.Instant; +import java.util.List; +import java.util.stream.Collectors; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import site.icebang.domain.workflow.dto.ExecutionLogDto; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ExecutionLogSimpleDto { + private String logLevel; + private String logMessage; + private Instant executedAt; + + public static ExecutionLogSimpleDto from(ExecutionLogDto executionLogDto) { + return ExecutionLogSimpleDto.builder() + .logLevel(executionLogDto.getLogLevel()) + .logMessage(executionLogDto.getLogMessage()) + .executedAt(executionLogDto.getExecutedAt()) + .build(); + } + + public static List from(List executionLogList) { + return executionLogList.stream().map(ExecutionLogSimpleDto::from).collect(Collectors.toList()); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionType.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionType.java new file mode 100644 index 00000000..e7dbd659 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/ExecutionType.java @@ -0,0 +1,7 @@ +package site.icebang.domain.workflow.dto.log; + +public enum ExecutionType { + WORKFLOW, + JOB, + TASK +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/TaskExecutionMessagesDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/TaskExecutionMessagesDto.java new file mode 100644 index 00000000..4f51f07d --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/TaskExecutionMessagesDto.java @@ -0,0 +1,15 @@ +package site.icebang.domain.workflow.dto.log; + +import java.util.List; +import 
java.util.stream.Collectors; + +import site.icebang.domain.workflow.dto.ExecutionLogDto; + +public record TaskExecutionMessagesDto(List messages) { + public static TaskExecutionMessagesDto from(List executionLogList) { + List messages = + executionLogList.stream().map(ExecutionLogDto::getLogMessage).collect(Collectors.toList()); + + return new TaskExecutionMessagesDto(messages); + } +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/WorkflowLogQueryCriteria.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/WorkflowLogQueryCriteria.java new file mode 100644 index 00000000..f2c2ed06 --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/log/WorkflowLogQueryCriteria.java @@ -0,0 +1,17 @@ +package site.icebang.domain.workflow.dto.log; + +import java.math.BigInteger; + +import jakarta.validation.constraints.Pattern; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class WorkflowLogQueryCriteria { + private final String traceId; + private final BigInteger sourceId; + + @Pattern(regexp = "^(WORKFLOW|JOB|TASK)$", message = "실행 타입은 WORKFLOW, JOB, TASK 중 하나여야 합니다") + private final String executionType; +} diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java index 5741e77b..1c3a0796 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java @@ -3,6 +3,8 @@ import java.time.Instant; import java.util.UUID; +import org.slf4j.MDC; + import lombok.Getter; import lombok.NoArgsConstructor; @@ -20,7 +22,8 @@ public class WorkflowRun { private WorkflowRun(Long workflowId) { this.workflowId = workflowId; - this.traceId = UUID.randomUUID().toString(); // 고유 추적 ID 생성 + // MDC에서 현재 요청의 traceId를 가져오거나, 없으면 새로 생성 + 
this.traceId = MDC.get("traceId") != null ? MDC.get("traceId") : UUID.randomUUID().toString(); this.status = "RUNNING"; this.startedAt = Instant.now(); this.createdAt = this.startedAt; diff --git a/apps/user-service/src/main/resources/mybatis/mapper/ExecutionLogMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ExecutionLogMapper.xml new file mode 100644 index 00000000..4c1ff830 --- /dev/null +++ b/apps/user-service/src/main/resources/mybatis/mapper/ExecutionLogMapper.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/06-insert-execution-log-h2.sql b/apps/user-service/src/main/resources/sql/data/06-insert-execution-log-h2.sql new file mode 100644 index 00000000..8dac68c8 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/06-insert-execution-log-h2.sql @@ -0,0 +1,38 @@ +-- execution_log 테스트 데이터 (H2용) +INSERT INTO execution_log (execution_type, source_id, log_level, executed_at, log_message, trace_id, run_id, status, duration_ms, error_code) VALUES +('WORKFLOW', 1, 'INFO', '2025-09-26 12:42:02.000', '========== 워크플로우 실행 시작: WorkflowId=1 ==========', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('WORKFLOW', 1, 'INFO', '2025-09-26 12:42:02.000', '총 2개의 Job을 순차적으로 실행합니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('JOB', 1, 'INFO', '2025-09-26 12:42:02.000', '---------- Job 실행 시작: JobId=1, JobRunId=1 ----------', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('JOB', 1, 'INFO', '2025-09-26 12:42:02.000', 'Job (JobRunId=1) 내 총 7개의 Task를 순차 실행합니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 1, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=1, Name=키워드 검색 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 1, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=1, TaskRunId=1', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, 
NULL, NULL), +('TASK', 1, 'ERROR', '2025-09-26 12:42:02.000', 'Task 최종 실패: TaskRunId=1, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 2, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=2, Name=상품 검색 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 2, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=2, TaskRunId=2', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 2, 'ERROR', '2025-09-26 12:42:02.000', 'Task 최종 실패: TaskRunId=2, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 3, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=3, Name=상품 매칭 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 3, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=3, TaskRunId=3', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 3, 'ERROR', '2025-09-26 12:42:02.000', 'Task 최종 실패: TaskRunId=3, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 4, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=4, Name=상품 유사도 분석 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 4, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=4, TaskRunId=4', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 4, 'ERROR', '2025-09-26 12:42:02.000', 'Task 최종 실패: TaskRunId=4, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 5, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=5, Name=상품 정보 크롤링 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 5, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=5, TaskRunId=5', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 5, 'ERROR', '2025-09-26 12:42:02.000', 'Task 최종 실패: TaskRunId=5, 
Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 6, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=6, Name=S3 업로드 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 6, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=6, TaskRunId=6', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 6, 'ERROR', '2025-09-26 12:42:02.000', 'Task 최종 실패: TaskRunId=6, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 7, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시작: TaskId=7, Name=상품 선택 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 7, 'INFO', '2025-09-26 12:42:02.000', 'Task 실행 시도 #1: TaskId=7, TaskRunId=7', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 7, 'ERROR', '2025-09-26 12:42:03.000', 'Task 최종 실패: TaskRunId=7, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('JOB', 1, 'ERROR', '2025-09-26 12:42:03.000', 'Job 실행 실패: JobRunId=1', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('JOB', 2, 'INFO', '2025-09-26 12:42:03.000', '---------- Job 실행 시작: JobId=2, JobRunId=2 ----------', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('JOB', 2, 'INFO', '2025-09-26 12:42:03.000', 'Job (JobRunId=2) 내 총 2개의 Task를 순차 실행합니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 8, 'INFO', '2025-09-26 12:42:03.000', 'Task 실행 시작: TaskId=8, Name=블로그 RAG 생성 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 8, 'INFO', '2025-09-26 12:42:03.000', 'Task 실행 시도 #1: TaskId=8, TaskRunId=8', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 8, 'ERROR', '2025-09-26 12:42:03.000', 'Task 최종 실패: TaskRunId=8, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 9, 'INFO', '2025-09-26 
12:42:03.000', 'Task 실행 시작: TaskId=9, Name=블로그 발행 태스크', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 9, 'INFO', '2025-09-26 12:42:03.000', 'Task 실행 시도 #1: TaskId=9, TaskRunId=9', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('TASK', 9, 'ERROR', '2025-09-26 12:42:03.000', 'Task 최종 실패: TaskRunId=9, Message=FastApiAdapter 호출에 실패했습니다.', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('JOB', 2, 'ERROR', '2025-09-26 12:42:03.000', 'Job 실행 실패: JobRunId=2', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL), +('WORKFLOW', 1, 'INFO', '2025-09-26 12:42:03.000', '========== 워크플로우 실행 실패 : WorkflowRunId=1 ==========', '68d60b8a2f4cd59a880cf71f189b4ca5', NULL, NULL, NULL, NULL); \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/ExecutionLogApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/ExecutionLogApiIntegrationTest.java new file mode 100644 index 00000000..39203494 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/ExecutionLogApiIntegrationTest.java @@ -0,0 +1,201 @@ +package site.icebang.integration.tests.workflow; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.restdocs.payload.JsonFieldType; +import 
org.springframework.security.test.context.support.WithUserDetails; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; + +import site.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = { + "classpath:sql/data/01-insert-internal-users.sql", + "classpath:sql/data/06-insert-execution-log-h2.sql" + }, + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +public class ExecutionLogApiIntegrationTest extends IntegrationTestSupport { + + @Test + @DisplayName("실행 로그 조회 성공 - 전체 조회") + @WithUserDetails("admin@icebang.site") + void getTaskExecutionLog_success() throws Exception { + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/workflow-runs/logs")) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").exists()) + .andExpect(jsonPath("$.data").isArray()) + .andExpect(jsonPath("$.data.length()").value(36)) + // 첫 번째 로그 검증 + .andExpect(jsonPath("$.data[0].logLevel").exists()) + .andExpect(jsonPath("$.data[0].logMessage").exists()) + .andExpect(jsonPath("$.data[0].executedAt").exists()) + .andDo( + document( + "execution-log-all", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Workflow History") + .summary("실행 로그 전체 조회") + .description("워크플로우 실행 로그를 상세 정보와 함께 조회합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.ARRAY).description("실행 로그 목록"), + fieldWithPath("data[].logLevel") + .type(JsonFieldType.STRING) + .description("로그 레벨 (INFO, ERROR, WARN, 
DEBUG)"), + fieldWithPath("data[].logMessage") + .type(JsonFieldType.STRING) + .description("로그 메시지"), + fieldWithPath("data[].executedAt") + .type(JsonFieldType.STRING) + .description("실행 시간 (UTC ISO-8601)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("실행 로그 조회 성공 - traceId 필터링") + @WithUserDetails("admin@icebang.site") + void getTaskExecutionLog_withTraceId_success() throws Exception { + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/workflow-runs/logs")) + .param("traceId", "68d60b8a2f4cd59a880cf71f189b4ca5") + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.data").isArray()) + .andExpect(jsonPath("$.data.length()").value(36)) + .andDo( + document( + "execution-log-by-trace-id", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Workflow History") + .summary("실행 로그 조회 - traceId 필터") + .description("특정 traceId로 워크플로우 실행 로그를 필터링하여 조회합니다") + .queryParameters( + parameterWithName("traceId").description("추적 ID").optional()) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.ARRAY).description("실행 로그 목록"), + fieldWithPath("data[].logLevel") + .type(JsonFieldType.STRING) + .description("로그 레벨 (INFO, ERROR, WARN, DEBUG)"), + fieldWithPath("data[].logMessage") + .type(JsonFieldType.STRING) + .description("로그 메시지"), + fieldWithPath("data[].executedAt") + .type(JsonFieldType.STRING) + .description("실행 시간 (UTC ISO-8601)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + 
.description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("실행 로그 조회 성공 - executionType 필터링") + @WithUserDetails("admin@icebang.site") + void getTaskExecutionLog_withExecutionType_success() throws Exception { + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/workflow-runs/logs")) + .param("executionType", "TASK") + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.data").isArray()) + .andExpect(jsonPath("$.data.length()").value(27)) // TASK 타입 로그만 + .andDo( + document( + "execution-log-by-execution-type", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("Workflow History") + .summary("실행 로그 조회 - executionType 필터") + .description("특정 executionType으로 워크플로우 실행 로그를 필터링하여 조회합니다") + .queryParameters( + parameterWithName("executionType") + .description("실행 타입 (WORKFLOW, JOB, TASK)") + .optional()) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.ARRAY).description("실행 로그 목록"), + fieldWithPath("data[].logLevel") + .type(JsonFieldType.STRING) + .description("로그 레벨 (INFO, ERROR, WARN, DEBUG)"), + fieldWithPath("data[].logMessage") + .type(JsonFieldType.STRING) + .description("로그 메시지"), + fieldWithPath("data[].executedAt") + .type(JsonFieldType.STRING) + .description("실행 시간 (UTC ISO-8601)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("실행 로그 조회 실패 - 잘못된 executionType") + @WithUserDetails("admin@icebang.site") + void getTaskExecutionLog_withInvalidExecutionType_fail() throws Exception { + // when & then + mockMvc + .perform( + get("/v0/workflow-runs/logs") + 
.param("executionType", "INVALID_TYPE") + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.success").value(false)); + } +} From 61150f98ee9c16499b560f464ddf6f297a77f4f4 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Sat, 27 Sep 2025 16:32:37 +0900 Subject: [PATCH 14/18] =?UTF-8?q?User=20=EA=B4=80=EB=A0=A8=20api=20test=20?= =?UTF-8?q?=EB=B0=8F=20api=20document=20=EC=9E=91=EC=84=B1=20(#217)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: User 개인 정보 조회 api * fix: Check email request Mapper에서 user가 아닌 users로 table 이름을 사용했던 버그 `@NoArgsConstructor`를 통해 직렬화가 실패하던 버그 * test: User check email api --- .../domain/user/dto/CheckEmailRequest.java | 2 + .../domain/user/mapper/UserMapper.java | 2 +- .../tests/user/UserApiIntegrationTest.java | 227 ++++++++++++++++++ 3 files changed, 230 insertions(+), 1 deletion(-) create mode 100644 apps/user-service/src/test/java/site/icebang/integration/tests/user/UserApiIntegrationTest.java diff --git a/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java index f3b2c2a1..fb4c9844 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/dto/CheckEmailRequest.java @@ -4,9 +4,11 @@ import jakarta.validation.constraints.NotBlank; import lombok.AllArgsConstructor; import lombok.Data; +import lombok.NoArgsConstructor; @Data @AllArgsConstructor +@NoArgsConstructor public class CheckEmailRequest { @NotBlank(message = "이메일은 필수입니다") @Email(message = "올바른 이메일 형식이 아닙니다") diff --git a/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java b/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java index d2e14012..75df3852
100644 --- a/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java +++ b/apps/user-service/src/main/java/site/icebang/domain/user/mapper/UserMapper.java @@ -6,6 +6,6 @@ @Mapper public interface UserMapper { - @Select("SELECT COUNT(1) > 0 FROM users WHERE email = #{email}") + @Select("SELECT COUNT(1) > 0 FROM user WHERE email = #{email}") boolean existsByEmail(@Param("email") String email); } diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/user/UserApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/user/UserApiIntegrationTest.java new file mode 100644 index 00000000..8b958437 --- /dev/null +++ b/apps/user-service/src/test/java/site/icebang/integration/tests/user/UserApiIntegrationTest.java @@ -0,0 +1,227 @@ +package site.icebang.integration.tests.user; + +import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document; +import static com.epages.restdocs.apispec.ResourceDocumentation.*; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; +import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; +import static org.springframework.restdocs.operation.preprocess.Preprocessors.*; +import static org.springframework.restdocs.payload.PayloadDocumentation.*; +import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.http.MediaType; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.security.test.context.support.WithUserDetails; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.transaction.annotation.Transactional; + +import com.epages.restdocs.apispec.ResourceSnippetParameters; + +import 
site.icebang.integration.setup.support.IntegrationTestSupport; + +@Sql( + value = { + "classpath:sql/data/01-insert-internal-users.sql", + "classpath:sql/data/02-insert-external-users.sql" + }, + executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) +@Transactional +public class UserApiIntegrationTest extends IntegrationTestSupport { + + @Test + @DisplayName("유저 자신의 정보 조회 성공") + @WithUserDetails("admin@icebang.site") + void getUserProfile_success() throws Exception { + // when & then + mockMvc + .perform( + get(getApiUrlForDocs("/v0/users/me")) + .contentType(MediaType.APPLICATION_JSON) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("OK")) + .andExpect(jsonPath("$.data").exists()) + .andExpect(jsonPath("$.data.id").exists()) + .andExpect(jsonPath("$.data.email").value("admin@icebang.site")) + .andExpect(jsonPath("$.data.name").exists()) + .andExpect(jsonPath("$.data.roles").exists()) + .andExpect(jsonPath("$.data.status").value("ACTIVE")) + .andDo( + document( + "user-profile", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("User") + .summary("사용자 프로필 조회") + .description("현재 로그인한 사용자의 프로필 정보를 조회합니다") + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.OBJECT).description("사용자 정보"), + fieldWithPath("data.id") + .type(JsonFieldType.NUMBER) + .description("사용자 ID"), + fieldWithPath("data.email") + .type(JsonFieldType.STRING) + .description("사용자 이메일"), + fieldWithPath("data.name") + .type(JsonFieldType.STRING) + .description("사용자 이름"), + fieldWithPath("data.roles") + .type(JsonFieldType.ARRAY) + .description("사용자 권한 목록"), + fieldWithPath("data.status") + 
.type(JsonFieldType.STRING) + .description("사용자 상태 (ACTIVE, INACTIVE)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("이메일 중복 검사 성공 - 사용 가능한 이메일") + @WithUserDetails("admin@icebang.site") + void checkEmailAvailable_success() throws Exception { + String requestBody = + """ + { + "email": "newuser@example.com" + } + """; + + // when & then + mockMvc + .perform( + post(getApiUrlForDocs("/v0/users/check-email")) + .contentType(MediaType.APPLICATION_JSON) + .content(requestBody) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andDo(print()) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("사용 가능한 이메일입니다.")) + .andExpect(jsonPath("$.data").exists()) + .andExpect(jsonPath("$.data.available").value(true)) + .andDo( + document( + "check-email-available", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("User") + .summary("이메일 중복 검사") + .description("사용자 회원가입 전 이메일 중복 여부를 확인합니다") + .requestFields( + fieldWithPath("email") + .type(JsonFieldType.STRING) + .description("검사할 이메일 주소")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.OBJECT).description("응답 데이터"), + fieldWithPath("data.available") + .type(JsonFieldType.BOOLEAN) + .description("이메일 사용 가능 여부 (true: 사용 가능, false: 이미 사용 중)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("이메일 중복 검사 성공 - 이미 사용 중인 이메일") + @WithUserDetails("admin@icebang.site") + 
void checkEmailAvailable_alreadyExists() throws Exception { + String requestBody = + """ + { + "email": "admin@icebang.site" + } + """; + + // when & then + mockMvc + .perform( + post(getApiUrlForDocs("/v0/users/check-email")) + .contentType(MediaType.APPLICATION_JSON) + .content(requestBody) + .header("Origin", "https://admin.icebang.site") + .header("Referer", "https://admin.icebang.site/")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.success").value(true)) + .andExpect(jsonPath("$.status").value("OK")) + .andExpect(jsonPath("$.message").value("이미 가입된 이메일입니다.")) + .andExpect(jsonPath("$.data").exists()) + .andExpect(jsonPath("$.data.available").value(false)) + .andDo( + document( + "check-email-unavailable", + preprocessRequest(prettyPrint()), + preprocessResponse(prettyPrint()), + resource( + ResourceSnippetParameters.builder() + .tag("User") + .summary("이메일 중복 검사 - 사용 불가") + .description("이미 가입된 이메일에 대한 중복 검사 결과") + .requestFields( + fieldWithPath("email") + .type(JsonFieldType.STRING) + .description("검사할 이메일 주소")) + .responseFields( + fieldWithPath("success") + .type(JsonFieldType.BOOLEAN) + .description("요청 성공 여부"), + fieldWithPath("data").type(JsonFieldType.OBJECT).description("응답 데이터"), + fieldWithPath("data.available") + .type(JsonFieldType.BOOLEAN) + .description("이메일 사용 가능 여부 (true: 사용 가능, false: 이미 사용 중)"), + fieldWithPath("message") + .type(JsonFieldType.STRING) + .description("응답 메시지"), + fieldWithPath("status") + .type(JsonFieldType.STRING) + .description("HTTP 상태")) + .build()))); + } + + @Test + @DisplayName("이메일 중복 검사 실패 - 잘못된 이메일 형식") + @WithUserDetails("admin@icebang.site") + void checkEmailAvailable_invalidFormat() throws Exception { + String requestBody = """ + { + "email": "invalid-email" + } + """; + + // when & then + mockMvc + .perform( + post("/v0/users/check-email") + .contentType(MediaType.APPLICATION_JSON) + .content(requestBody) + .header("Origin", "https://admin.icebang.site") + .header("Referer", 
"https://admin.icebang.site/")) + .andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.success").value(false)); + } +} From cfc639785b85268dfc4aee16ea84cf763b115272 Mon Sep 17 00:00:00 2001 From: Yousung Jung Date: Sat, 27 Sep 2025 16:33:27 +0900 Subject: [PATCH 15/18] =?UTF-8?q?Gradle=20=EC=BA=90=EC=8B=B1=EC=9D=84=20?= =?UTF-8?q?=ED=86=B5=ED=95=B4=20CI=20(Java)=20=EC=86=8D=EB=8F=84=20?= =?UTF-8?q?=EA=B0=9C=EC=84=A0=20(#218)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: ci-java에 gradle action 추가 * feat: gradle properties에서 build caching 활성화 - 각 Job 내에서 변경되지 않은 task는 UP-TO-DATE 처리 - Job 간 중복 컴파일은 그대로 두되, Job 내 불필요한 재작업 방지 - 안전하고 검증된 기본 최적화 * refactor: Test, document step 분리 * chore: 쓸모없는 dev container 삭제 * refactor: lint, test step 수행 조건 평가 후 실행 각 step이 자신에게 관게있는 파일 변경 시에만 실행 * chore: 변경점에 따라 step 실행 revert PR sync시에도 base와 변경점을 감지 - 소스 변경: 모든 테스트 영향 (컴파일 필요) - 테스트 변경: 빌드는 필요 - 의존성 복잡: Unit ↔ Integration 경계 모호 - 복잡성 증가 > 성능 향상 - 조건문 길어짐: 가독성 저하 - 멀티 모듈이라면 의미가 있지만 단일 모듈, 의미 없다고 판단 * refactor: Tag 발행 시 cache가 쓰이도록 변경 --- .github/workflows/ci-java.yml | 15 +++++++++++++-- apps/user-service/gradle.properties | 1 + .../site/icebang/TestUserServiceApplication.java | 12 ------------ .../site/icebang/TestcontainersConfiguration.java | 6 ------ 4 files changed, 14 insertions(+), 20 deletions(-) create mode 100644 apps/user-service/gradle.properties delete mode 100644 apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java delete mode 100644 apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index e3349be5..0cae6a72 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -12,6 +12,7 @@ on: - release/** paths: - "apps/user-service/**" + - ".github/workflows/ci-java.yml" permissions: contents: read @@ -32,12 +33,17 @@ jobs: - name: Checkout repository uses: 
actions/checkout@v4 + - name: Set up JDK 21 uses: actions/setup-java@v4 with: java-version: '21' distribution: 'temurin' - cache: 'gradle' + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v3 + with: + cache-read-only: ${{ github.event_name == 'pull_request' }} - name: Grant execute permission for Gradle wrapper run: chmod +x ./gradlew @@ -59,12 +65,17 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 + - name: Set up JDK ${{ matrix.java-version }} uses: actions/setup-java@v4 with: java-version: '${{ matrix.java-version }}' distribution: 'temurin' - cache: 'gradle' + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v3 + with: + cache-read-only: ${{ github.event_name == 'pull_request' }} - name: Grant execute permission for Gradle wrapper run: chmod +x ./gradlew diff --git a/apps/user-service/gradle.properties b/apps/user-service/gradle.properties new file mode 100644 index 00000000..5f1ed7bb --- /dev/null +++ b/apps/user-service/gradle.properties @@ -0,0 +1 @@ +org.gradle.caching=true \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java b/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java deleted file mode 100644 index ba8c2403..00000000 --- a/apps/user-service/src/test/java/site/icebang/TestUserServiceApplication.java +++ /dev/null @@ -1,12 +0,0 @@ -package site.icebang; - -import org.springframework.boot.SpringApplication; - -public class TestUserServiceApplication { - - public static void main(String[] args) { - SpringApplication.from(UserServiceApplication::main) - .with(TestcontainersConfiguration.class) - .run(args); - } -} diff --git a/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java b/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java deleted file mode 100644 index b9eb7b76..00000000 --- a/apps/user-service/src/test/java/site/icebang/TestcontainersConfiguration.java +++ /dev/null 
@@ -1,6 +0,0 @@ -package site.icebang; - -import org.springframework.boot.test.context.TestConfiguration; - -@TestConfiguration(proxyBeanMethods = false) -class TestcontainersConfiguration {} From bb2353488e536f92b0c30bafaea0c1b46a554a3f Mon Sep 17 00:00:00 2001 From: can019 Date: Sat, 27 Sep 2025 16:35:04 +0900 Subject: [PATCH 16/18] =?UTF-8?q?chore:=20version=200.1.0-SNAPSHOT?= =?UTF-8?q?=EC=9C=BC=EB=A1=9C=20build.gradle=20update?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/user-service/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/user-service/build.gradle b/apps/user-service/build.gradle index 94750654..16905e8e 100644 --- a/apps/user-service/build.gradle +++ b/apps/user-service/build.gradle @@ -8,7 +8,7 @@ plugins { } group = 'site.icebang' -version = '0.0.1-beta-STABLE' +version = '0.1.0-SNAPSHOT' description = 'Ice bang - fast campus team4' java { From b4a62ff51b9acf83323a7bbd938fe0334ac20786 Mon Sep 17 00:00:00 2001 From: can019 Date: Sat, 27 Sep 2025 16:38:47 +0900 Subject: [PATCH 17/18] =?UTF-8?q?chore:=20Document=20artifact=20step=20?= =?UTF-8?q?=EB=B6=84=EB=A6=AC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-java.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 0cae6a72..10165fce 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -89,13 +89,18 @@ jobs: run: | ./gradlew unitTest ./gradlew integrationTest - ./gradlew javadoc if [ "${{ github.base_ref }}" = "main" ] || [[ "${{ github.ref }}" == refs/tags/* ]]; then ./gradlew e2eTest - ./gradlew openapi3 fi working-directory: apps/user-service + - name: Generate document artifacts + run: | + ./gradlew javadoc + if [ "${{ github.base_ref }}" = "main" ] || [[ "${{ github.ref }}" == refs/tags/* ]]; then + ./gradlew
openapi3 + fi + - name: Upload build artifacts if: matrix.java-version == '21' && startsWith(github.ref, 'refs/tags/') uses: actions/upload-artifact@v4 From 369e616ba335b56146ce164cd3476c266f054217 Mon Sep 17 00:00:00 2001 From: can019 Date: Sat, 27 Sep 2025 16:43:02 +0900 Subject: [PATCH 18/18] =?UTF-8?q?fix:=20Working=20directory=20document-jav?= =?UTF-8?q?a=20step=EC=97=90=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-java.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-java.yml b/.github/workflows/ci-java.yml index 10165fce..773b9102 100644 --- a/.github/workflows/ci-java.yml +++ b/.github/workflows/ci-java.yml @@ -100,6 +100,7 @@ jobs: if [ "${{ github.base_ref }}" = "main" ] || [[ "${{ github.ref }}" == refs/tags/* ]]; then ./gradlew openapi3 fi + working-directory: apps/user-service - name: Upload build artifacts if: matrix.java-version == '21' && startsWith(github.ref, 'refs/tags/')