@@ -1,6 +1,4 @@
package org.springframework.samples.petclinic.clinicactivity;

import com.github.javafaker.Faker;
package org.springframework.samples.petclinic.clinicactivity;import com.github.javafaker.Faker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -20,8 +18,7 @@
import org.postgresql.copy.CopyManager;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.ZoneId;import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
@@ -32,10 +29,7 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.Random;
import java.util.Map;
import java.util.HashMap;

@Service
public class ClinicActivityDataService {
import java.util.HashMap;@Servicepublic class ClinicActivityDataService {

private static final Logger logger = LoggerFactory.getLogger(ClinicActivityDataService.class);
private static final int BATCH_SIZE = 1000;
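Review note: in the joined declaration above, @Service and public appear to run together as @Servicepublic. If the whitespace really is missing in the new file, this will not compile, since the parser would look for an annotation type named Servicepublic. The conventional layout keeps the annotation on its own line:

    import java.util.HashMap;

    @Service
    public class ClinicActivityDataService {
        // ...
    }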
@@ -44,19 +38,15 @@ public class ClinicActivityDataService {
private final ClinicActivityLogRepository repository;
private final JdbcTemplate jdbcTemplate;
private final DataSource dataSource;
private final PlatformTransactionManager transactionManager;

// List of 15 possible activity types
private final PlatformTransactionManager transactionManager;// List of 15 possible activity types
private static final List<String> ACTIVITY_TYPES = List.of(
"Patient Check-in", "Patient Check-out", "Appointment Scheduling", "Medical Record Update",
"Prescription Issuance", "Lab Test Order", "Lab Test Result Review", "Billing Generation",
"Payment Processing", "Inventory Check", "Staff Shift Start", "Staff Shift End",
"Emergency Alert", "Consultation Note", "Follow-up Reminder"
);
private final Random random = new Random();
private final ExecutorService executorService = Executors.newFixedThreadPool(8);

@Autowired
private final ExecutorService executorService = Executors.newFixedThreadPool(8);@Autowired
public ClinicActivityDataService(ClinicActivityLogRepository repository,
@Qualifier("postgresJdbcTemplate") JdbcTemplate jdbcTemplate,
@Qualifier("postgresDataSource") DataSource dataSource,
@@ -69,12 +59,18 @@ public ClinicActivityDataService(ClinicActivityLogRepository repository,

@Transactional
public int getActiveLogsRatio(String type) {
var all = repository.countLogsByType(type);
var active = repository.countActiveLogsByType(type);
return active/all;
}

@Transactional
logger.debug("Calculating active logs ratio for type: {}", type);
var all = repository.countLogsByType(type);
var active = repository.countActiveLogsByType(type);
logger.debug("Total logs: {}, Active logs: {}", all, active);

if (all == 0) {
logger.warn("No logs found for type: {}. Returning 0 to avoid division by zero.", type);
return 0;
}

return active * 100 / all; // Convert to percentage
}@Transactional
public void cleanupActivityLogs() {
logger.info("Received request to clean up all clinic activity logs.");
long startTime = System.currentTimeMillis();
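Review note: the reworked getActiveLogsRatio now logs its inputs, guards against division by zero, and returns an integer percentage instead of the old active/all, which truncated to 0 for any ratio below 1. A minimal sketch of the new logic, assuming the repository count queries return numeric counts; the names are taken from the diff:

    public int getActiveLogsRatio(String type) {
        long all = repository.countLogsByType(type);        // total logs of this type
        long active = repository.countActiveLogsByType(type);
        if (all == 0) {
            return 0;                                        // no logs: avoid division by zero
        }
        return (int) (active * 100 / all);                   // truncating percentage, e.g. 49 for 99/200
    }

Multiplying by 100 before dividing, as the new code does, is what keeps the integer result meaningful; the old active/all always came out as 0 or 1.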
@@ -86,19 +82,15 @@ public void cleanupActivityLogs() {
logger.error("Error during clinic activity log cleanup", e);
throw new RuntimeException("Error cleaning up activity logs: " + e.getMessage(), e);
}
}

@Transactional
}@Transactional
public void populateData(int totalEntries) {
long startTime = System.currentTimeMillis();
Connection con = null;
try {
con = DataSourceUtils.getConnection(dataSource);
String databaseProductName = con.getMetaData().getDatabaseProductName();
DataSourceUtils.releaseConnection(con, dataSource);
con = null;

if ("PostgreSQL".equalsIgnoreCase(databaseProductName)) {
con = null;if ("PostgreSQL".equalsIgnoreCase(databaseProductName)) {
logger.info("Using PostgreSQL COPY for data population of {} entries.", totalEntries);
populateDataWithCopyToNewTransaction(totalEntries);
} else {
@@ -113,11 +105,8 @@ public void populateData(int totalEntries) {
DataSourceUtils.releaseConnection(con, dataSource);
}
}
long endTime = System.currentTimeMillis();
logger.info("Finished data population for {} clinic activity logs in {} ms.", totalEntries, (endTime - startTime));
}

private void populateDataWithCopyToNewTransaction(int totalEntries) throws Exception {
long endTime = System.currentTimeMillis();logger.info("Finished data population for {} clinic activity logs in {} ms.", totalEntries, (endTime - startTime));
}private void populateDataWithCopyToNewTransaction(int totalEntries) throws Exception {
DefaultTransactionDefinition def = new DefaultTransactionDefinition();
def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus status = transactionManager.getTransaction(def);
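Review note: both bulk-load paths open their own transaction programmatically with PROPAGATION_REQUIRES_NEW. The surrounding commit/rollback bookkeeping, partly visible further down in this diff, follows the usual PlatformTransactionManager pattern, roughly:

    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus status = transactionManager.getTransaction(def);
    try {
        // ... perform the bulk work on a connection bound to this transaction ...
        transactionManager.commit(status);
    } catch (Exception e) {
        if (!status.isCompleted()) {
            transactionManager.rollback(status);   // roll back only if not already completed
        }
        throw new RuntimeException("Bulk load failed: " + e.getMessage(), e);
    }

This keeps each bulk load isolated from the caller's @Transactional context, so a failed load does not poison an outer transaction.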
@@ -127,9 +116,7 @@ private void populateDataWithCopyToNewTransaction(int totalEntries) throws Excep
Faker faker = new Faker(new Locale("en-US"));
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
StringBuilder sb = new StringBuilder();
CopyManager copyManager = connection.unwrap(PGConnection.class).getCopyAPI();

for (int i = 0; i < totalEntries; i++) {
CopyManager copyManager = connection.unwrap(PGConnection.class).getCopyAPI();for (int i = 0; i < totalEntries; i++) {
String activityType = ACTIVITY_TYPES.get(random.nextInt(ACTIVITY_TYPES.size()));
int numericVal = faker.number().numberBetween(1, 100_000);
String ts = dtf.format(LocalDateTime.ofInstant(
@@ -141,9 +128,7 @@ private void populateDataWithCopyToNewTransaction(int totalEntries) throws Excep
.append(numericVal).append(',')
.append(csv(ts)).append(',')
.append(statusFlag).append(',')
.append(csv(payload)).append('\n');

if ((i + 1) % COPY_FLUSH_EVERY == 0 || (i + 1) == totalEntries) {
.append(csv(payload)).append('\n');if ((i + 1) % COPY_FLUSH_EVERY == 0 || (i + 1) == totalEntries) {
copyManager.copyIn("COPY clinic_activity_logs (activity_type, numeric_value, event_timestamp, status_flag, payload) FROM STDIN WITH (FORMAT csv)", new java.io.StringReader(sb.toString()));
sb.setLength(0);
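Review note: the COPY path streams CSV rows through the PostgreSQL driver's CopyManager, flushing the StringBuilder every COPY_FLUSH_EVERY rows. The core of the technique, with the table and column list copied from the diff:

    CopyManager copyManager = connection.unwrap(PGConnection.class).getCopyAPI();
    StringBuilder sb = new StringBuilder();
    // ... append one CSV line per generated row ...
    copyManager.copyIn(
        "COPY clinic_activity_logs (activity_type, numeric_value, event_timestamp, status_flag, payload) "
            + "FROM STDIN WITH (FORMAT csv)",
        new java.io.StringReader(sb.toString()));
    sb.setLength(0);   // reset the buffer so the next batch starts empty

Flushing in fixed-size chunks keeps the in-memory CSV buffer bounded instead of building one giant string for all totalEntries rows.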
if (logger.isInfoEnabled()){
@@ -163,9 +148,7 @@ private void populateDataWithCopyToNewTransaction(int totalEntries) throws Excep
DataSourceUtils.releaseConnection(connection, dataSource);
}
}
}

private void populateDataWithJdbcBatchInNewTransaction(int totalEntries) {
}private void populateDataWithJdbcBatchInNewTransaction(int totalEntries) {
DefaultTransactionDefinition def = new DefaultTransactionDefinition();
def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus status = transactionManager.getTransaction(def);
@@ -177,8 +160,7 @@ private void populateDataWithJdbcBatchInNewTransaction(int totalEntries) {
for (int j = 0; j < BATCH_SIZE && i < totalEntries; j++, i++) {
String activityType = ACTIVITY_TYPES.get(random.nextInt(ACTIVITY_TYPES.size()));
int numericVal = faker.number().numberBetween(1, 100_000);
Timestamp eventTimestamp = Timestamp.from(
faker.date().past(5 * 365, TimeUnit.DAYS).toInstant().atZone(ZoneId.systemDefault()).toInstant()
Timestamp eventTimestamp = Timestamp.from(faker.date().past(5 * 365, TimeUnit.DAYS).toInstant().atZone(ZoneId.systemDefault()).toInstant()
);
boolean statusFlag = faker.bool().bool();
String payload = String.join(" ", faker.lorem().paragraphs(faker.number().numberBetween(1, 3)));
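Review note: the non-PostgreSQL fallback accumulates rows and writes them with a JdbcTemplate batch. The INSERT statement itself is outside this hunk, so the sketch below assumes it targets the same five columns used by the COPY path:

    String sql = "INSERT INTO clinic_activity_logs "
        + "(activity_type, numeric_value, event_timestamp, status_flag, payload) "
        + "VALUES (?, ?, ?, ?, ?)";
    List<Object[]> batchArgs = new ArrayList<>();
    // inside the generation loop:
    batchArgs.add(new Object[] { activityType, numericVal, eventTimestamp, statusFlag, payload });
    // once BATCH_SIZE rows are collected (or the loop ends):
    jdbcTemplate.batchUpdate(sql, batchArgs);
    batchArgs.clear();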
@@ -195,13 +177,10 @@ private void populateDataWithJdbcBatchInNewTransaction(int totalEntries) {
} catch (Exception e) {
if (!status.isCompleted()) {
transactionManager.rollback(status);
}
logger.error("Error during JDBC batch population with new transaction", e);
}logger.error("Error during JDBC batch population with new transaction", e);
throw new RuntimeException("Error during JDBC batch population with new transaction: " + e.getMessage(), e);
}
}

private String csv(String value) {
}private String csv(String value) {
if (value == null) {
return "";
}
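Review note: only the null branch of csv(...) is visible in this hunk. For COPY ... WITH (FORMAT csv) the usual escaping is to quote the field and double any embedded quotes; a sketch of what the rest of the helper presumably does, not the exact body from the file:

    private String csv(String value) {
        if (value == null) {
            return "";
        }
        // quote the field and escape embedded double quotes for CSV
        return "\"" + value.replace("\"", "\"\"") + "\"";
    }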
@@ -219,9 +198,7 @@ public void createIOIntensiveLoad(int durationMinutes, int numThreads, int limit
AtomicInteger globalOperationCount = new AtomicInteger(0);
List<Thread> threads = new ArrayList<>();

logger.info("Creating {} I/O intensive threads with {} record limit per query...", numThreads, limit);

// Create I/O intensive threads
logger.info("Creating {} I/O intensive threads with {} record limit per query...", numThreads, limit);// Create I/O intensive threads
for (int t = 0; t < numThreads; t++) {
final int threadId = t;
Thread ioThread = new Thread(() -> {
@@ -240,9 +217,7 @@ public void createIOIntensiveLoad(int durationMinutes, int numThreads, int limit
logger.info("Starting all {} I/O intensive threads...", numThreads);
for (Thread thread : threads) {
thread.start();
}

// Wait for all threads to complete
}// Wait for all threads to complete
for (Thread thread : threads) {
try {
thread.join();
@@ -260,9 +235,7 @@ public void createIOIntensiveLoad(int durationMinutes, int numThreads, int limit
logger.error("Error during I/O intensive load test", e);
throw new RuntimeException("Error during I/O intensive load test: " + e.getMessage(), e);
}
}

private void executeIOIntensiveThread(int threadId, long endTime, AtomicInteger globalOperationCount, int limit) {
}private void executeIOIntensiveThread(int threadId, long endTime, AtomicInteger globalOperationCount, int limit) {
Random random = new Random();
Faker faker = new Faker(new Locale("en-US"));
int localOperationCount = 0;
@@ -278,10 +251,7 @@ private void executeIOIntensiveThread(int threadId, long endTime, AtomicInteger
"FROM clinic_activity_logs " +
"WHERE LENGTH(payload) > 100 " +
"ORDER BY random()" +
"LIMIT " + limit);


localOperationCount++;
"LIMIT " + limit);localOperationCount++;
int currentGlobalCount = globalOperationCount.incrementAndGet();

// Log progress every 100 operations per thread
@@ -295,9 +265,7 @@ private void executeIOIntensiveThread(int threadId, long endTime, AtomicInteger
// But avoid overwhelming the system with a tiny yield
if (localOperationCount % 50 == 0) {
Thread.yield();
}

} catch (Exception e) {
}} catch (Exception e) {
logger.error("Error in I/O operation for thread {}", threadId, e);
try {
Thread.sleep(10); // Brief pause on error
@@ -310,4 +278,4 @@ private void executeIOIntensiveThread(int threadId, long endTime, AtomicInteger

logger.info("I/O Thread {} completed {} total I/O operations", threadId, localOperationCount);
}
}
}
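Review note (whole file): executorService is created with Executors.newFixedThreadPool(8), but no shutdown is visible in this diff. If the class does not already close it elsewhere, a small lifecycle hook would keep the pool's threads from outliving the application context; a sketch, assuming nothing else manages the pool:

    @PreDestroy
    public void shutdownExecutor() {
        executorService.shutdown();   // stop accepting new work; let queued tasks finish
    }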