From b703d7574d4aa3c8589d92fedfe3ece3cf700913 Mon Sep 17 00:00:00 2001
From: Asaf Chen <109060156+asafchen-dig@users.noreply.github.com>
Date: Wed, 23 Jul 2025 16:20:43 -0400
Subject: [PATCH] fix: handle division by zero in getActiveLogsRatio and add logging

---
 .../ClinicActivityDataService.java | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/src/main/java/org/springframework/samples/petclinic/clinicactivity/ClinicActivityDataService.java b/src/main/java/org/springframework/samples/petclinic/clinicactivity/ClinicActivityDataService.java
index 7790c49e555..aa756ef6c68 100644
--- a/src/main/java/org/springframework/samples/petclinic/clinicactivity/ClinicActivityDataService.java
+++ b/src/main/java/org/springframework/samples/petclinic/clinicactivity/ClinicActivityDataService.java
@@ -69,12 +69,20 @@ public ClinicActivityDataService(ClinicActivityLogRepository repository,
 
     @Transactional
     public int getActiveLogsRatio(String type) {
-        var all = repository.countLogsByType(type);
-        var active = repository.countActiveLogsByType(type);
-        return active/all;
+        logger.debug("Calculating active logs ratio for type: {}", type);
+        var all = repository.countLogsByType(type);
+        var active = repository.countActiveLogsByType(type);
+        logger.debug("Total logs: {}, Active logs: {}", all, active);
+
+        if (all == 0) {
+            logger.warn("No logs found for type: {}. Returning 0 to avoid division by zero.", type);
+            return 0;
+        }
+
+        return active * 100 / all; // Convert to percentage
     }
 
     @Transactional
     public void cleanupActivityLogs() {
         logger.info("Received request to clean up all clinic activity logs.");
         long startTime = System.currentTimeMillis();
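
The patch does not include a regression test for the division-by-zero path. Below is a minimal unit-test sketch for the new getActiveLogsRatio behaviour, under these assumptions: JUnit 5 and Mockito are on the test classpath, the ClinicActivityLogRepository count methods return int (as the integer arithmetic in getActiveLogsRatio implies), and the service's other constructor arguments may be passed as null because getActiveLogsRatio only touches the repository. The test class name is illustrative and not part of the repository.

// Hypothetical sketch, not part of this patch.
package org.springframework.samples.petclinic.clinicactivity;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.jupiter.api.Test;

class ClinicActivityDataServiceRatioTests {

    @Test
    void returnsZeroWhenNoLogsExist() {
        // Assumption: count methods return int, so stubbing with 0 compiles as written.
        ClinicActivityLogRepository repository = mock(ClinicActivityLogRepository.class);
        when(repository.countLogsByType("Lab Test Order")).thenReturn(0);
        when(repository.countActiveLogsByType("Lab Test Order")).thenReturn(0);

        // getActiveLogsRatio never uses the JdbcTemplate, DataSource, or
        // PlatformTransactionManager, so nulls are passed purely for this sketch.
        ClinicActivityDataService service =
            new ClinicActivityDataService(repository, null, null, null);

        // With zero total logs the method should now return 0 instead of throwing.
        assertEquals(0, service.getActiveLogsRatio("Lab Test Order"));
    }

    @Test
    void returnsPercentageWhenLogsExist() {
        ClinicActivityLogRepository repository = mock(ClinicActivityLogRepository.class);
        when(repository.countLogsByType("Patient Check-in")).thenReturn(200);
        when(repository.countActiveLogsByType("Patient Check-in")).thenReturn(50);

        ClinicActivityDataService service =
            new ClinicActivityDataService(repository, null, null, null);

        // 50 active out of 200 total should yield 25 percent.
        assertEquals(25, service.getActiveLogsRatio("Patient Check-in"));
    }
}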