Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/actions/test-app-startup/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ runs:
- name: Test application startup
shell: bash
working-directory: ${{ inputs.working-directory }}
env:
KAFKA_BOOTSTRAP_SERVERS: localhost:9092
run: |
for service in ${{ inputs.services }}; do
echo "Testing Service... [$service]"
Expand Down
13 changes: 8 additions & 5 deletions config/kafka-topics.yml
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
spring:
kafka:
bootstrap-servers: ${KAFKA_BOOTSTRAP_SERVERS:localhost:9092}

bootstrap-servers: ${KAFKA_BOOTSTRAP_SERVERS:kafka1:29092,kafka2:29092,kafka3:29092}
listener:
ack-mode: MANUAL

producer:
compression-type: lz4
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
acks: all
Expand All @@ -24,9 +25,11 @@ spring:
enable-auto-commit: false
auto-offset-reset: earliest
max-poll-records: 50
max.poll.interval.ms: 300000
session.timeout.ms: 15000
heartbeat.interval.ms: 3000
properties:
  max.poll.interval.ms: 300000
  session.timeout.ms: 15000
  heartbeat.interval.ms: 3000
  partition.assignment.strategy: org.apache.kafka.clients.consumer.CooperativeStickyAssignor
group:
order: order-group
payment: payment-group
Expand Down
4 changes: 0 additions & 4 deletions connectors/order-outbox.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,6 @@
"transforms.outbox.table.field.event.payload": "payload",
"transforms.outbox.route.by.field": "event_type",
"transforms.outbox.route.topic.replacement": "${routedByValue}",
"key.converter": "org.apache.kafka.connect.json.JsonConverter",
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
"key.converter.schemas.enable": "false",
"value.converter.schemas.enable": "false",
"transforms.outbox.table.expand.json.payload": "true",
"producer.acks": "all",
"producer.enable.idempotence": "true",
Expand Down
4 changes: 0 additions & 4 deletions connectors/payment-outbox.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,6 @@
"transforms.outbox.table.field.event.payload": "payload",
"transforms.outbox.route.by.field": "event_type",
"transforms.outbox.route.topic.replacement": "${routedByValue}",
"key.converter": "org.apache.kafka.connect.json.JsonConverter",
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
"key.converter.schemas.enable": "false",
"value.converter.schemas.enable": "false",
"transforms.outbox.table.expand.json.payload": "true",
"producer.acks": "all",
"producer.enable.idempotence": "true",
Expand Down
40 changes: 22 additions & 18 deletions connectors/register-connectors.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,27 +1,31 @@
#!/bin/sh

TARGET_URL=${CONNECT_URL:-"http://connect:8083"}
TARGET_URL=${CONNECT_URL:-"http://connect1:8083"}

echo "Waiting for Kafka Connect..."
while [ $(curl -s -o /dev/null -w "%{http_code}" $TARGET_URL) -ne 200 ]; do
echo "Waiting for Kafka Connect at $TARGET_URL..."

until [ $(curl -s -o /dev/null -w "%{http_code}" "$TARGET_URL") -eq 200 ]; do
echo "Connect is not ready yet... (waiting 3s)"
sleep 3
done

echo "Registering connectors from /configs..."
echo " Kafka Connect is UP. Starting registration..."

for file in /configs/*.json; do
filename=$(basename "$file")
echo "Processing $filename..."

sed -e "s|\${env:DB_HOST}|$DB_HOST|g" \
-e "s|\${env:SPRING_DATASOURCE_USERNAME}|$SPRING_DATASOURCE_USERNAME|g" \
-e "s|\${env:SPRING_DATASOURCE_PASSWORD}|$SPRING_DATASOURCE_PASSWORD|g" \
-e "s|\${env:DB_NAME}|$DB_NAME|g" \
"$file" > "/tmp/$filename"

response=$(curl -s -X POST -H "Content-Type: application/json" \
-d @"/tmp/$filename" \
$TARGET_URL/connectors)
echo "Response for $filename: $response"
connector_name=$(jq -r '.name' "$file")
config_body=$(jq -c '.config' "$file")

echo "Processing Connector: $connector_name"
response=$(curl -s -o /dev/null -w "%{http_code}" -X PUT \
-H "Content-Type: application/json" \
-d "$config_body" \
"$TARGET_URL/connectors/$connector_name/config")

if [ "$response" -eq 200 ] || [ "$response" -eq 201 ]; then
echo "Successfully registered/updated: $connector_name"
else
echo "Failed to register $connector_name (HTTP Status: $response)"
fi
done

echo "ALL connectors Created"
echo "ALL connectors have been processed!"
157 changes: 118 additions & 39 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,92 +31,167 @@ services:
- spot-network

# --- Infrastructure: Event Broker (Kafka) ---
kafka:
kafka1:
image: apache/kafka:4.0.0
container_name: kafka-local
container_name: kafka1
restart: always
environment:
KAFKA_NODE_ID: 1
KAFKA_PROCESS_ROLES: 'broker,controller'
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka1:19093,2@kafka2:19093,3@kafka3:19093'
KAFKA_LISTENERS: 'INTERNAL://0.0.0.0:29092,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:19093'
KAFKA_ADVERTISED_LISTENERS: 'INTERNAL://kafka1:29092,EXTERNAL://localhost:9092'
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,CONTROLLER:PLAINTEXT'
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
CLUSTER_ID: 'J9Xz7kQPRYyK8VkqH3mW5A'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 3
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 2
KAFKA_DEFAULT_REPLICATION_FACTOR: 3
KAFKA_MIN_INSYNC_REPLICAS: 2
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
KAFKA_NUM_PARTITIONS: 3
KAFKA_HEAP_OPTS: '-Xmx256M -Xms256M'
KAFKA_LOG4J_ROOT_LOGLEVEL: 'WARN'
KAFKA_LOG4J_LOGGERS: 'kafka=WARN,state.change.logger=WARN'
ports:
- "9092:9092"
- "19093:19093"
volumes:
- kafka-data-1:/var/lib/kafka/data
networks:
- spot-network

kafka2:
image: apache/kafka:4.0.0
container_name: kafka2
restart: always
environment:
KAFKA_NODE_ID: 1
KAFKA_NODE_ID: 2
KAFKA_PROCESS_ROLES: 'broker,controller'
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka1:19093,2@kafka2:19093,3@kafka3:19093'
KAFKA_LISTENERS: 'INTERNAL://0.0.0.0:29092,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:19093'
KAFKA_ADVERTISED_LISTENERS: 'INTERNAL://kafka2:29092,EXTERNAL://localhost:9093'
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,CONTROLLER:PLAINTEXT'
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
CLUSTER_ID: 'J9Xz7kQPRYyK8VkqH3mW5A'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 3
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 2
KAFKA_DEFAULT_REPLICATION_FACTOR: 3
KAFKA_MIN_INSYNC_REPLICAS: 2
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
KAFKA_NUM_PARTITIONS: 3
KAFKA_HEAP_OPTS: '-Xmx256M -Xms256M'
KAFKA_LOG4J_ROOT_LOGLEVEL: 'WARN'
KAFKA_LOG4J_LOGGERS: 'kafka=WARN,state.change.logger=WARN'
ports:
- "9093:9092"
volumes:
- kafka-data-2:/var/lib/kafka/data
networks:
- spot-network

kafka3:
image: apache/kafka:4.0.0
container_name: kafka3
restart: always
environment:
KAFKA_NODE_ID: 3
KAFKA_PROCESS_ROLES: 'broker,controller'
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:19093'
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka1:19093,2@kafka2:19093,3@kafka3:19093'
KAFKA_LISTENERS: 'INTERNAL://0.0.0.0:29092,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:19093'
KAFKA_ADVERTISED_LISTENERS: 'INTERNAL://kafka:29092,EXTERNAL://localhost:9092'
KAFKA_ADVERTISED_LISTENERS: 'INTERNAL://kafka3:29092,EXTERNAL://localhost:9094'
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,CONTROLLER:PLAINTEXT'
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
CLUSTER_ID: 'J9Xz7kQPRYyK8VkqH3mW5A'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_DEFAULT_REPLICATION_FACTOR: 1
KAFKA_MIN_INSYNC_REPLICAS: 1
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 3
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 2
KAFKA_DEFAULT_REPLICATION_FACTOR: 3
KAFKA_MIN_INSYNC_REPLICAS: 2
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
KAFKA_NUM_PARTITIONS: 3
KAFKA_HEAP_OPTS: '-Xmx512M -Xms512M'
KAFKA_HEAP_OPTS: '-Xmx256M -Xms256M'
KAFKA_LOG4J_ROOT_LOGLEVEL: 'WARN'
KAFKA_LOG4J_LOGGERS: 'kafka=WARN,state.change.logger=WARN'
ports:
- "9094:9092"
volumes:
- kafka-data:/var/lib/kafka/data
- kafka-data-3:/var/lib/kafka/data
networks:
- spot-network

connect:
connect1: &connect-common
container_name: connect1
image: quay.io/debezium/connect:3.4.0.Final
container_name: connect
depends_on:
- kafka
- db
ports:
- "8888:8083"
env_file:
- .env
environment:
CONNECT_CONFIG_PROVIDERS: 'env'
CONNECT_CONFIG_PROVIDERS_ENV_CLASS: 'org.apache.kafka.common.config.provider.EnvVarConfigProvider'
BOOTSTRAP_SERVERS: kafka:29092
GROUP_ID: 1
CONFIG_STORAGE_TOPIC: my_connect_configs
OFFSET_STORAGE_TOPIC: my_connect_offsets
STATUS_STORAGE_TOPIC: my_connect_statuses
BOOTSTRAP_SERVERS: kafka1:29092,kafka2:29092,kafka3:29092
GROUP_ID: "spot-connect-group"
CONFIG_STORAGE_TOPIC: "connect_configs"
OFFSET_STORAGE_TOPIC: "connect_offsets"
STATUS_STORAGE_TOPIC: "connect_status"
CONFIG_STORAGE_REPLICATION_FACTOR: 3
OFFSET_STORAGE_REPLICATION_FACTOR: 3
STATUS_STORAGE_REPLICATION_FACTOR: 3
KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_KEY_CONVERTER_SCHEMAS_ENABLE: "false"
CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE: "false"
LOGGING_LEVEL: 'WARN'
CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
depends_on:
- kafka1
- kafka2
- kafka3
- db
networks:
- spot-network

connect2:
<<: *connect-common
container_name: connect2
ports:
- "8889:8083"

connect-init:
image: curlimages/curl:latest
image: alpine:latest
container_name: connect-init
env_file:
- .env
depends_on:
- connect
- connect1
- connect2
networks:
- spot-network
volumes:
- ./connectors:/configs
entrypoint: ["/bin/sh", "/configs/register-connectors.sh"]
entrypoint: ["/bin/sh", "-c", "apk add --no-cache curl jq && chmod +x /configs/register-connectors.sh && /configs/register-connectors.sh"]

kafka-ui:
image: provectuslabs/kafka-ui:latest
container_name: kafka-ui-local
container_name: kafka-ui
ports:
- "8989:8080"
environment:
KAFKA_CLUSTERS_0_NAME: local-spot-cluster
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: 'kafka:29092'
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: connect
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: 'http://connect:8083'
KAFKA_CLUSTERS_0_NAME: spot-cluster
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: 'kafka1:29092,kafka2:29092,kafka3:29092'
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: connect-cluster
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: 'http://connect1:8083'
LOGGING_LEVEL_ROOT: WARN
LOGGING_LEVEL_COM_PROVECTUS: WARN
depends_on:
- kafka
- kafka1
- kafka2
- kafka3
networks:
- spot-network

Expand Down Expand Up @@ -192,14 +267,16 @@ services:
environment:
- DB_HOST=db
- SPRING_DATA_REDIS_HOST=redis
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092
- KAFKA_BOOTSTRAP_SERVERS=kafka1:29092,kafka2:29092,kafka3:29092
- SPRING_TEMPORAL_CONNECTION_TARGET=temporal:7233
- LOGGING_LEVEL_ROOT=WARN
depends_on:
- db
- redis
- kafka
- temporal
- kafka1
- kafka2
- kafka3
networks:
- spot-network
env_file:
Expand All @@ -219,12 +296,14 @@ services:
environment:
- DB_HOST=db
- SPRING_DATA_REDIS_HOST=redis
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092
- KAFKA_BOOTSTRAP_SERVERS=kafka1:29092,kafka2:29092,kafka3:29092
- LOGGING_LEVEL_ROOT=WARN
depends_on:
- db
- redis
- kafka
- kafka1
- kafka2
- kafka3
networks:
- spot-network

Expand All @@ -240,7 +319,6 @@ services:
environment:
- DB_HOST=db
- SPRING_DATA_REDIS_HOST=redis
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092
- LOGGING_LEVEL_ROOT=WARN
depends_on:
- db
Expand All @@ -262,7 +340,6 @@ services:
environment:
- DB_HOST=db
- SPRING_DATA_REDIS_HOST=redis
- KAFKA_BOOTSTRAP_SERVERS=kafka:29092
- LOGGING_LEVEL_ROOT=WARN
depends_on:
- db
Expand All @@ -277,5 +354,7 @@ networks:
driver: bridge

volumes:
kafka-data:
kafka-data-1:
kafka-data-2:
kafka-data-3:
postgres_data:
4 changes: 2 additions & 2 deletions infra/k8s/apps/spot-order.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,13 @@ spec:
httpGet:
path: /actuator/health
port: 8082
initialDelaySeconds: 30
initialDelaySeconds: 150
periodSeconds: 10
livenessProbe:
httpGet:
path: /actuator/health
port: 8082
initialDelaySeconds: 60
initialDelaySeconds: 180
periodSeconds: 30
volumes:
- name: app-config
Expand Down
Loading