Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 1 addition & 6 deletions config/common.yml
Original file line number Diff line number Diff line change
Expand Up @@ -74,12 +74,7 @@ toss:
customerKey: ${spring.toss.customerKey:${TOSS_CUSTOMER_KEY}}
secretKey: ${spring.toss.secretKey:${TOSS_SECRET_KEY}}
timeout: 10

logging:
level:
org.apache.kafka: WARN
com.zaxxer.hikari: WARN


management:
endpoints:
web:
Expand Down
30 changes: 30 additions & 0 deletions connectors/order-outbox.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
"name": "order-outbox-connector",
"config": {
"connector.class": "io.debezium.connector.postgresql.PostgresConnector",
"tasks.max": "1",
"database.hostname": "${env:DB_HOST}",
"database.port": "5432",
"database.user": "${env:SPRING_DATASOURCE_USERNAME}",
"database.password": "${env:SPRING_DATASOURCE_PASSWORD}",
"database.dbname": "${env:DB_NAME}",
"topic.prefix": "order_outbox_cdc",
"plugin.name": "pgoutput",
"slot.name": "order_outbox_slot",
"snapshot.mode": "schema_only",
"table.include.list": "public.p_order_outbox",
"transforms": "outbox",
"transforms.outbox.type": "io.debezium.transforms.outbox.EventRouter",
"transforms.outbox.table.field.event.id": "id",
"transforms.outbox.table.field.event.key": "aggregate_id",
"transforms.outbox.table.field.event.type": "event_type",
"transforms.outbox.table.field.event.payload": "payload",
"transforms.outbox.route.by.field": "event_type",
"transforms.outbox.route.topic.replacement": "${routedByValue}",
"key.converter": "org.apache.kafka.connect.json.JsonConverter",
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
"key.converter.schemas.enable": "false",
"value.converter.schemas.enable": "false",
"transforms.outbox.table.expand.json.payload": "true"
}
}
30 changes: 30 additions & 0 deletions connectors/payment-outbox.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
"name": "payment-outbox-connector",
"config": {
"connector.class": "io.debezium.connector.postgresql.PostgresConnector",
"tasks.max": "1",
"database.hostname": "${env:DB_HOST}",
"database.port": "5432",
"database.user": "${env:SPRING_DATASOURCE_USERNAME}",
"database.password": "${env:SPRING_DATASOURCE_PASSWORD}",
"database.dbname": "${env:DB_NAME}",
"topic.prefix": "payment_outbox_cdc",
"plugin.name": "pgoutput",
"slot.name": "payment_outbox_slot",
"snapshot.mode": "schema_only",
"table.include.list": "public.p_payment_outbox",
"transforms": "outbox",
"transforms.outbox.type": "io.debezium.transforms.outbox.EventRouter",
"transforms.outbox.table.field.event.id": "id",
"transforms.outbox.table.field.event.key": "aggregate_id",
"transforms.outbox.table.field.event.type": "event_type",
"transforms.outbox.table.field.event.payload": "payload",
"transforms.outbox.route.by.field": "event_type",
"transforms.outbox.route.topic.replacement": "${routedByValue}",
"key.converter": "org.apache.kafka.connect.json.JsonConverter",
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
"key.converter.schemas.enable": "false",
"value.converter.schemas.enable": "false",
"transforms.outbox.table.expand.json.payload": "true"
}
}
10 changes: 10 additions & 0 deletions connectors/register-connectors.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/sh
# Registers every connector definition in /configs with the Kafka Connect REST API.
# Intended to run as a one-shot init container after the `connect` service starts.

echo "Waiting for Kafka Connect..."
# Quote the command substitution: if curl produces no output (DNS failure, etc.)
# an unquoted empty result would make the `[ ... ]` test a syntax error.
while [ "$(curl -s -o /dev/null -w "%{http_code}" http://connect:8083)" -ne 200 ]; do
  sleep 3
done

echo "Registering connectors from /configs..."
for file in /configs/*.json; do
  # Capture the HTTP status instead of discarding it so a failed registration
  # is visible in the container logs. 201 = created, 409 = already registered.
  status=$(curl -s -o /dev/null -w "%{http_code}" -X POST -H "Content-Type: application/json" -d @"$file" http://connect:8083/connectors)
  echo "Registered $file -> HTTP $status"
done
57 changes: 54 additions & 3 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,12 @@ services:
db:
image: postgres:15-alpine
container_name: local-postgres_db
command: postgres -c max_connections=300
command: >
postgres
-c max_connections=300
-c wal_level=logical
-c max_replication_slots=10
-c max_wal_senders=10
environment:
- POSTGRES_DB=myapp_db
- POSTGRES_USER=admin
Expand Down Expand Up @@ -36,7 +41,7 @@ services:
environment:
KAFKA_NODE_ID: 1
KAFKA_LOG4J_ROOT_LOGLEVEL: 'WARN'
KAFKA_LOG4J_LOGGERS: 'kafka.controller=WARN,state.change.logger=WARN,kafka.producer.async.DefaultEventHandler=WARN'
KAFKA_LOG4J_LOGGERS: 'kafka=WARN,state.change.logger=WARN'
KAFKA_PROCESS_ROLES: 'broker,controller'
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:19093'
KAFKA_LISTENERS: 'INTERNAL://0.0.0.0:29092,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:19093'
Expand All @@ -57,15 +62,55 @@ services:
- kafka-data:/var/lib/kafka/data
networks:
- spot-network

connect:
image: debezium/connect:2.4
container_name: connect
depends_on:
- kafka
- db
ports:
- "8888:8083"
env_file:
- .env
environment:
CONNECT_CONFIG_PROVIDERS: 'env'
CONNECT_CONFIG_PROVIDERS_ENV_CLASS: 'org.apache.kafka.common.config.provider.EnvVarConfigProvider'
BOOTSTRAP_SERVERS: kafka:29092
GROUP_ID: 1
CONFIG_STORAGE_TOPIC: my_connect_configs
OFFSET_STORAGE_TOPIC: my_connect_offsets
STATUS_STORAGE_TOPIC: my_connect_statuses
KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_KEY_CONVERTER_SCHEMAS_ENABLE: "false"
CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE: "false"
LOGGING_LEVEL: 'WARN'
CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
networks:
- spot-network

connect-init:
image: curlimages/curl:latest
container_name: connect-init
depends_on:
- connect
networks:
- spot-network
volumes:
- ./connectors:/configs
entrypoint: ["/bin/sh", "/configs/register-connectors.sh"]

kafka-ui:
image: provectuslabs/kafka-ui:latest
container_name: kafka-ui-local
ports:
- "8989:8080" # spot-order(8082)와 충돌을 피하기 위해 8989로 변경
- "8989:8080"
environment:
KAFKA_CLUSTERS_0_NAME: local-spot-cluster
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: 'kafka:29092'
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: connect
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: 'http://connect:8083'
LOGGING_LEVEL_ROOT: WARN
LOGGING_LEVEL_COM_PROVECTUS: WARN
depends_on:
Expand All @@ -79,6 +124,8 @@ services:
context: ./spot-gateway
dockerfile: Dockerfile
container_name: spot-gateway
environment:
- LOGGING_LEVEL_ROOT=WARN
ports:
- "8080:8080"
volumes:
Expand All @@ -104,6 +151,7 @@ services:
environment:
- DB_HOST=db
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092 # 필요 시 추가
- LOGGING_LEVEL_ROOT=WARN
volumes:
- ./config:/config
depends_on:
Expand All @@ -127,6 +175,7 @@ services:
environment:
- DB_HOST=db
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092 # 필요 시 추가
- LOGGING_LEVEL_ROOT=WARN
volumes:
- ./config:/config
depends_on:
Expand All @@ -146,6 +195,7 @@ services:
environment:
- DB_HOST=db
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092 # 필요 시 추가
- LOGGING_LEVEL_ROOT=WARN
volumes:
- ./config:/config
depends_on:
Expand All @@ -166,6 +216,7 @@ services:
environment:
- DB_HOST=db
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092 # 필요 시 추가
- LOGGING_LEVEL_ROOT=WARN
volumes:
- ./config:/config
depends_on:
Expand Down
9 changes: 3 additions & 6 deletions kafka.sh
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
#!/bin/bash

echo "===기존 Order,Payment 컨테이너 종료 및 삭제==="
docker compose stop spot-order spot-payment
docker compose rm -f spot-order spot-payment
echo "===기존 컨테이너 종료 및 삭제==="
docker compose down

echo "===Order,Payment 서비스 빌드==="
(cd spot-order && ./gradlew bootJar -x test)
(cd spot-payment && ./gradlew bootJar -x test)

echo "===Order,Payment 빌드 및 시작==="
docker compose build spot-order spot-payment
docker compose up -d spot-order spot-payment

docker compose up --build
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
package com.example.Spot.order.application.service;

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import lombok.extern.slf4j.Slf4j;

/**
 * Schedules the daily purge of aged order-outbox rows.
 *
 * <p>Delegates the actual deletion to {@link OrderOutboxCleanupService}; any
 * failure is caught and logged so one bad run never kills the scheduler thread.
 * NOTE(review): requires {@code @EnableScheduling} somewhere in the application
 * configuration for the cron trigger to fire — confirm it is present.
 */
@Slf4j
@Component
public class OrderOutboxCleanupScheduler {

    private final OrderOutboxCleanupService cleanupService;

    public OrderOutboxCleanupScheduler(OrderOutboxCleanupService cleanupService) {
        this.cleanupService = cleanupService;
    }

    /** Fires at 03:00 server time every day. */
    @Scheduled(cron = "0 0 3 * * *")
    public void run() {
        try {
            cleanupService.cleanup();
        } catch (Exception e) {
            // Swallow after logging: the next scheduled run should still happen.
            log.error("[ORDER_OUTBOX-CLEANUP] scheduler failed", e);
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package com.example.Spot.order.application.service;

import java.time.LocalDateTime;

import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.example.Spot.order.domain.repository.OrderOutboxRepository;

import lombok.extern.slf4j.Slf4j;

/**
 * Deletes order-outbox rows that have aged past the retention window.
 *
 * <p>Invoked by {@link OrderOutboxCleanupScheduler}; the whole purge runs in a
 * single transaction.
 */
@Slf4j
@Service
public class OrderOutboxCleanupService {

    /** Rows older than this many days are eligible for deletion. */
    private static final int RETENTION_DAYS = 7;

    private final OrderOutboxRepository orderOutboxRepository;

    public OrderOutboxCleanupService(OrderOutboxRepository orderOutboxRepository) {
        this.orderOutboxRepository = orderOutboxRepository;
    }

    /**
     * Removes every outbox entry older than {@code now - RETENTION_DAYS} and
     * logs the count when anything was actually deleted.
     */
    @Transactional
    public void cleanup() {
        final LocalDateTime threshold = LocalDateTime.now().minusDays(RETENTION_DAYS);
        final int deletedCount = orderOutboxRepository.deleteOlderThan(threshold);

        if (deletedCount > 0) {
            log.info("[ORDER_OUTBOX-CLEANUP] deleted {} rows (threshold={})", deletedCount, threshold);
        }
    }
}

This file was deleted.

This file was deleted.

Loading
Loading