From a3a8079320cbcc221f8bf8a87a5c0d97913f96e4 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 16 Jul 2025 15:45:09 +0530 Subject: [PATCH 01/23] Fix the issue in retrieving the Casesheet Print Data for Cancer Screening (#96) * fix: change the return type to object to get the details * fix: remove commented lines --- pom.xml | 17 +++++++++++------ .../iemr/mmu/repo/nurse/BenVisitDetailRepo.java | 2 +- .../cancerScreening/CSNurseServiceImpl.java | 2 +- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 70859837..eeef30e2 100644 --- a/pom.xml +++ b/pom.xml @@ -163,6 +163,17 @@ jackson-datatype-joda 2.17.0 + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + jakarta.servlet jakarta.servlet-api @@ -179,12 +190,6 @@ org.apache.poi poi-ooxml 5.2.5 - - - org.apache.commons - commons-compress - - diff --git a/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java b/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java index d0902ee0..8c659c8b 100644 --- a/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java +++ b/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java @@ -68,7 +68,7 @@ public Integer updateBenFlowStatus(@Param("visitFlowStatusFlag") String visitFlo + "bvd.reportFilePath,sp.serviceProviderName from BeneficiaryVisitDetail bvd " + "INNER JOIN bvd.providerServiceMapping p " + "INNER JOIN p.serviceProvider sp " + "WHERE bvd.beneficiaryRegID = :benRegID AND bvd.visitCode = :visitCode ") - public List getBeneficiaryVisitDetails(@Param("benRegID") Long benRegID, + public List getBeneficiaryVisitDetails(@Param("benRegID") Long benRegID, @Param("visitCode") Long visitCode); @Query(" SELECT COUNT(benVisitID) FROM BeneficiaryVisitDetail WHERE beneficiaryRegID = :benRegID GROUP BY beneficiaryRegID ") diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java index 770e81a8..b6b15f22 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java @@ -710,7 +710,7 @@ public Map getBenNurseDataForCaseSheet(Long benRegID, Long visit } public BeneficiaryVisitDetail getBeneficiaryVisitDetails(Long benRegID, Long visitCode) { - List beneficiaryVisitDetail = benVisitDetailRepo.getBeneficiaryVisitDetails(benRegID, visitCode); + List beneficiaryVisitDetail = benVisitDetailRepo.getBeneficiaryVisitDetails(benRegID, visitCode); BeneficiaryVisitDetail beneficiaryVisit = null; if (null != beneficiaryVisitDetail) { for (Object[] obj : beneficiaryVisitDetail) { From 458af672d6c553a79385552dd184aab83beaa6cd Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 16 Jul 2025 15:49:36 +0530 Subject: [PATCH 02/23] fix: add the column for NumberperWeek to store and fetch the data (#94) --- .../iemr/mmu/data/anc/BenPersonalHabit.java | 35 +++++++++++++++---- .../repo/nurse/anc/BenPersonalHabitRepo.java | 4 +-- .../transaction/CommonNurseServiceImpl.java | 9 +++-- 3 files changed, 37 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java b/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java index 17594d3e..078c3208 100644 --- a/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java +++ b/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java @@ 
-93,6 +93,10 @@ public class BenPersonalHabit { @Column(name = "NumberperDay") private Short numberperDay; + @Expose + @Column(name = "NumberperWeek") + private Short numberperWeek; + @Expose @Column(name = "TobaccoUseDuration") private Timestamp tobaccoUseDuration; @@ -333,10 +337,18 @@ public Short getNumberperDay() { return numberperDay; } + public Short getNumberperWeek() { + return numberperWeek; + } + public void setNumberperDay(Short numberperDay) { this.numberperDay = numberperDay; } + public void setNumberperWeek(Short numberperWeek) { + this.numberperWeek = numberperWeek; + } + public Timestamp getTobaccoUseDuration() { return tobaccoUseDuration; } @@ -579,6 +591,9 @@ public ArrayList getPersonalHistory() { if (null != tobaccoInfo.get("numberperDay")) { benPersonalHabit.setNumberperDay(new Short(tobaccoInfo.get("numberperDay"))); } + if (null != tobaccoInfo.get("numberperWeek")) { + benPersonalHabit.setNumberperWeek(new Short(tobaccoInfo.get("numberperWeek"))); + } timePeriodUnit = (String) tobaccoInfo.get("durationUnit"); if (null != tobaccoInfo.get("duration")) { @@ -634,7 +649,7 @@ public ArrayList getPersonalHistory() { } public BenPersonalHabit(Date createdDate, String dietaryType, String physicalActivityType, String tobaccoUseStatus, - String tobaccoUseType, String otherTobaccoUseType, Short numberperDay, Date tobaccoUseDuration, + String tobaccoUseType, String otherTobaccoUseType, Short numberperDay, Short numberperWeek, Date tobaccoUseDuration, Character riskySexualPracticesStatus) { super(); this.captureDate = createdDate; @@ -650,6 +665,8 @@ public BenPersonalHabit(Date createdDate, String dietaryType, String physicalAct } else if(riskySexualPracticesStatus !=null && riskySexualPracticesStatus == '1') { this.riskySexualPracticeStatus = "Yes"; } + this.numberperWeek = numberperWeek; + } public BenPersonalHabit(Date createdDate, String dietaryType, String physicalActivityType, @@ -687,7 +704,7 @@ public BenPersonalHabit(Long beneficiaryRegID, Long benVisitID, Integer provider } public BenPersonalHabit(String tobaccoUseTypeID, String tobaccoUseType, String otherTobaccoUseType, - Short numberperDay, Timestamp tobaccoUseDuration, String alcoholTypeID, String alcoholType, + Short numberperDay, Short numberperWeek, Timestamp tobaccoUseDuration, String alcoholTypeID, String alcoholType, String otherAlcoholType, String alcoholIntakeFrequency, String avgAlcoholConsumption, Timestamp alcoholDuration, Timestamp createdDate, Long visitCode) { super(); @@ -704,6 +721,7 @@ public BenPersonalHabit(String tobaccoUseTypeID, String tobaccoUseType, String o this.alcoholDuration = alcoholDuration; this.createdDate = createdDate; this.visitCode = visitCode; + this.numberperWeek = numberperWeek; } public static BenPersonalHabit getPersonalDetails(ArrayList personalHistoryDetails) { @@ -712,15 +730,15 @@ public static BenPersonalHabit getPersonalDetails(ArrayList personalHi Object[] obj1 = personalHistoryDetails.get(0); personalDetails = new BenPersonalHabit((Long) obj1[0], (Long) obj1[1], (Integer) obj1[2], (String) obj1[3], - (String) obj1[4], (String) obj1[5], (String) obj1[11], (Character) obj1[18]); + (String) obj1[4], (String) obj1[5], (String) obj1[12], (Character) obj1[19]); ArrayList> tobaccoList = new ArrayList>(); ArrayList> alcoholList = new ArrayList>(); for (Object[] obj : personalHistoryDetails) { BenPersonalHabit personalHabits = new BenPersonalHabit((String) obj[6], (String) obj[7], - (String) obj[8], (Short) obj[9], (Timestamp) obj[10], (String) obj[12], (String) obj[13], - 
(String) obj[14], (String) obj[15], (String) obj[16], (Timestamp) obj[17], (Timestamp) obj[19], - (Long) obj[20]); + (String) obj[8], (Short) obj[9], (Short) obj[10], (Timestamp) obj[11], (String) obj[13], (String) obj[14], + (String) obj[15], (String) obj[16], (String) obj[17], (Timestamp) obj[18], (Timestamp) obj[20], + (Long) obj[21]); Map timePeriod = null; // Integer timePeriodAgo = null; @@ -732,7 +750,10 @@ public static BenPersonalHabit getPersonalDetails(ArrayList personalHi if (null != personalHabits.getNumberperDay()) { tobaccoInfo.put("numberperDay", personalHabits.getNumberperDay().toString()); } - + if (null != personalHabits.getNumberperWeek()) { + tobaccoInfo.put("numberperWeek", personalHabits.getNumberperWeek().toString()); + } + timePeriod = Utility.convertTimeToWords(personalHabits.getTobaccoUseDuration(), personalHabits.getCreatedDate()); diff --git a/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java b/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java index 8ad5044f..be3d149d 100644 --- a/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java +++ b/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java @@ -39,7 +39,7 @@ public interface BenPersonalHabitRepo extends CrudRepository getBenLastVisitID(@Param("beneficiaryRegID") Long beneficiaryRegID); - @Query("select Date(createdDate), dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseType, otherTobaccoUseType, numberperDay, " + @Query("select Date(createdDate), dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseType, otherTobaccoUseType, numberperDay, numberperWeek, " + "Date(tobaccoUseDuration), riskySexualPracticesStatus from BenPersonalHabit a where a.beneficiaryRegID = :beneficiaryRegID " + "AND tobaccoUseStatus is not null AND deleted = false order by createdDate DESC") public ArrayList getBenPersonalTobaccoHabitDetail(@Param("beneficiaryRegID") Long beneficiaryRegID); @@ -51,7 +51,7 @@ public interface BenPersonalHabitRepo extends CrudRepository getBenPersonalAlcoholHabitDetail(@Param("beneficiaryRegID") Long beneficiaryRegID); @Query(" SELECT beneficiaryRegID, benVisitID, providerServiceMapID, dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseTypeID, " - + "tobaccoUseType, otherTobaccoUseType, numberperDay, tobaccoUseDuration, alcoholIntakeStatus, alcoholTypeID, " + + "tobaccoUseType, otherTobaccoUseType, numberperDay, numberperWeek, tobaccoUseDuration, alcoholIntakeStatus, alcoholTypeID, " + "alcoholType, otherAlcoholType, alcoholIntakeFrequency, avgAlcoholConsumption, alcoholDuration, riskySexualPracticesStatus, createdDate, visitCode " + "FROM BenPersonalHabit WHERE beneficiaryRegID = :benRegID AND deleted = false AND visitCode = :visitCode") public ArrayList getBenPersonalHabitDetail(@Param("benRegID") Long benRegID, diff --git a/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java b/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java index 4326df40..0fedb10d 100644 --- a/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java @@ -1085,6 +1085,11 @@ public String fetchBenPersonalTobaccoHistory(Long beneficiaryRegID) { column.put("keyName", "numberperDay"); columns.add(column); + column = new HashMap(); + column.put("columnName", "Number Per Week"); + column.put("keyName", "numberperWeek"); + columns.add(column); + column = new HashMap(); 
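A note on the obj[...] renumbering in this patch: the personal-habit queries return positional Object[] rows, so adding numberperWeek right after numberperDay shifts every later column one slot to the right, which is why the constructor calls and casts are renumbered above and below. A minimal sketch of the reordered tobacco-history row (the values are hypothetical; only the positions mirror the updated query):

class TobaccoHistoryRowSketch {
    public static void main(String[] args) {
        // Hypothetical projection row in the order of the updated query:
        // createdDate, dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseType,
        // otherTobaccoUseType, numberperDay, numberperWeek, tobaccoUseDuration, riskySexualPracticesStatus
        Object[] row = {
                java.sql.Date.valueOf("2025-07-16"), // 0: createdDate
                "Vegetarian",                        // 1: dietaryType
                "Moderate",                          // 2: physicalActivityType
                "Yes",                               // 3: tobaccoUseStatus
                "Smoking",                           // 4: tobaccoUseType
                null,                                // 5: otherTobaccoUseType
                (short) 5,                           // 6: numberperDay
                (short) 20,                          // 7: numberperWeek (new slot)
                java.sql.Date.valueOf("2020-01-01"), // 8: tobaccoUseDuration (was index 7)
                '1'                                  // 9: riskySexualPracticesStatus (was index 8)
        };
        Short numberPerWeek = (Short) row[7]; // every cast after index 6 moves by one
        System.out.println("numberperWeek = " + numberPerWeek);
    }
}
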
column.put("columnName", "Tobacco Use Start Date"); column.put("keyName", "tobacco_use_duration"); @@ -1100,8 +1105,8 @@ public String fetchBenPersonalTobaccoHistory(Long beneficiaryRegID) { for (Object[] obj : benPersonalHabits) { BenPersonalHabit benPersonalHabit = new BenPersonalHabit((Date) obj[0], (String) obj[1], - (String) obj[2], (String) obj[3], (String) obj[4], (String) obj[5], (Short) obj[6], - (Date) obj[7], (Character) obj[8]); + (String) obj[2], (String) obj[3], (String) obj[4], (String) obj[5], (Short) obj[6], (Short) obj[7], + (Date) obj[8], (Character) obj[9]); personalHabits.add(benPersonalHabit); } From cd5cffbeb95f7cf4fe12f4ff9832dcf9afc1f639 Mon Sep 17 00:00:00 2001 From: Amoghavarsh <93114621+5Amogh@users.noreply.github.com> Date: Mon, 21 Jul 2025 11:57:09 +0530 Subject: [PATCH 03/23] Update version in pom.xml to 3.4.0 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index eeef30e2..e436c7bb 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ 4.0.0 com.iemr.mmu mmu-api - 3.1.0 + 3.4.0 war MMU-API From 16c39912c602ee2a9c7770a607b3a07f67460b67 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Mon, 21 Jul 2025 14:34:15 +0530 Subject: [PATCH 04/23] chore: add Lombok @Data to BenClinicalObservations (#97) --- .../mmu/data/quickConsultation/BenClinicalObservations.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java b/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java index 3d2941c1..96721ce6 100644 --- a/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java +++ b/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java @@ -34,7 +34,10 @@ import com.google.gson.annotations.Expose; +import lombok.Data; + @Entity +@Data @Table(name = "t_benclinicalobservation") public class BenClinicalObservations { @Id From b66035cdf1b1da7bdaba74bc8637b748d4a33978 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 25 Jul 2025 14:55:13 +0530 Subject: [PATCH 05/23] fix: add file path in cancer gynecological examination (#98) --- .../mmu/data/doctor/CancerGynecologicalExamination.java | 8 ++++++++ .../iemr/mmu/service/cancerScreening/CSServiceImpl.java | 6 ++++++ 2 files changed, 14 insertions(+) diff --git a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java index fb1ab66f..a63bbc9b 100644 --- a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java +++ b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java @@ -33,9 +33,12 @@ import jakarta.persistence.Transient; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.gson.annotations.Expose; +import lombok.Data; @Entity +@Data @Table(name = "t_cancergynecologicalexamination") public class CancerGynecologicalExamination { @Id @@ -92,6 +95,11 @@ public class CancerGynecologicalExamination { @Column(name = "RTIOrSTIDetail") private String rTIOrSTIDetail; + @Expose + @Transient + @JsonProperty("fileIDs") + private List fileIDs; + @Expose @Column(name = "FilePath") private String filePath; diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java 
index c65f38f2..9a97fbcb 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java @@ -977,6 +977,12 @@ public Long saveBenExaminationDetails(JsonObject requestOBJ, Long benVisitID, St .fromJson(examinationOBJ.get("gynecologicalDetails"), CancerGynecologicalExamination.class); cancerGynecologicalExamination.setBenVisitID(benVisitID); cancerGynecologicalExamination.setVisitCode(benVisitCode); + + if (cancerGynecologicalExamination.getFileIDs() != null) { + cancerGynecologicalExamination.setFilePath( + String.join(",", cancerGynecologicalExamination.getFileIDs())); + } + Long ID = cSNurseServiceImpl.saveCancerGynecologicalExaminationData(cancerGynecologicalExamination); if (ID != null && ID > 0) { // gynecologicalDetails stored successfully... From d878d4b1037a8debbe285cfdfbe2d8f393c993fa Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 30 Jul 2025 11:04:36 +0530 Subject: [PATCH 06/23] Fix the data sync issue (#93) * fix: Data Sync batch processing for large data * fix: use parameterized query * fix: revert the updated query * fix: add token if it is missing while calling restTemplate * fix: update the properties * fix: sync group wise * fix: enable logger in pom.xml * fix: coderabbit comments * fix: remove logger and replace the license * fix: remove the logs * fix: resolve code scanning alert * fix: resolve code scanning alert * fix: resolve code scanning alert * fix: resolve code scanning alert * fix: add comment for code violation * fix: use syncuploaddataDigester class to load the deatils * fix: add syncuploaddigestor in implementation file too * fix: sonarcube comments --- pom.xml | 14 +- .../MMUDataSyncVanToServer.java | 11 +- .../DataSyncRepositoryCentral.java | 348 +++---- .../GetDataFromVanAndSyncToDBImpl.java | 952 +++++++++++------- .../com/iemr/mmu/utils/RestTemplateUtil.java | 45 +- src/main/resources/application.properties | 7 + 6 files changed, 848 insertions(+), 529 deletions(-) diff --git a/pom.xml b/pom.xml index e436c7bb..b0e8639c 100644 --- a/pom.xml +++ b/pom.xml @@ -47,12 +47,22 @@ org.springframework.boot spring-boot-starter - + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} co.elastic.logging diff --git a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java index dedad0eb..eeb54e9f 100644 --- a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java +++ b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java @@ -36,9 +36,11 @@ import com.iemr.mmu.service.dataSyncLayerCentral.FetchDownloadDataImpl; import com.iemr.mmu.service.dataSyncLayerCentral.GetDataFromVanAndSyncToDBImpl; import com.iemr.mmu.service.dataSyncLayerCentral.GetMasterDataFromCentralForVanImpl; +import com.iemr.mmu.utils.CookieUtil; import com.iemr.mmu.utils.response.OutputResponse; import io.swagger.v3.oas.annotations.Operation; +import jakarta.servlet.http.HttpServletRequest; /*** * @operation Class used for data sync from van-to-server & server-to-van @@ -58,10 +60,15 @@ public class MMUDataSyncVanToServer { @Operation(summary = "Sync data from van-to-server") @PostMapping(value = { "/van-to-server" }, consumes = "application/json", produces = "application/json") public String dataSyncToServer(@RequestBody String 
requestOBJ, - @RequestHeader(value = "Authorization") String Authorization) { + @RequestHeader(value = "Authorization") String Authorization, HttpServletRequest request) { OutputResponse response = new OutputResponse(); + + logger.info("test: vanto server auth="+Authorization); try { - String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization); + String jwtToken = CookieUtil.getJwtTokenFromCookie(request); + logger.info("test: vanto server token="+jwtToken); + + String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization, jwtToken); if (s != null) response.setResponse(s); else diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index 6b62af69..175de980 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Set; import javax.sql.DataSource; @@ -34,175 +35,186 @@ import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; -/*** - * - * @author NE298657 - * - */ +import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; @Service public class DataSyncRepositoryCentral { - @Autowired - private DataSource dataSource; - - private JdbcTemplate jdbcTemplate; - - private JdbcTemplate getJdbcTemplate() { - return new JdbcTemplate(dataSource); - - } - - private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - - // Data Upload Repository - public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, - String vanAutoIncColumnName, int syncFacilityID) { - jdbcTemplate = getJdbcTemplate(); - - List params = new ArrayList<>(); - - StringBuilder queryBuilder = new StringBuilder("SELECT "); - queryBuilder.append(vanAutoIncColumnName); - queryBuilder.append(" FROM "); - queryBuilder.append(schemaName+"."+tableName); - - //params.add(vanAutoIncColumnName); - //params.add(schemaName); - //params.add(tableName); - - StringBuilder whereClause = new StringBuilder(); - whereClause.append(" WHERE "); - whereClause.append("VanSerialNo = ?"); - params.add(vanSerialNo); - - if ((tableName.equalsIgnoreCase("t_patientissue") || tableName.equalsIgnoreCase("t_physicalstockentry") - || tableName.equalsIgnoreCase("t_stockadjustment") || tableName.equalsIgnoreCase("t_saitemmapping") - || tableName.equalsIgnoreCase("t_stocktransfer") || tableName.equalsIgnoreCase("t_patientreturn") - || tableName.equalsIgnoreCase("t_facilityconsumption") || tableName.equalsIgnoreCase("t_indent") - || tableName.equalsIgnoreCase("t_indentorder") || tableName.equalsIgnoreCase("t_indentissue") - || tableName.equalsIgnoreCase("t_itemstockentry") || tableName.equalsIgnoreCase("t_itemstockexit")) - && syncFacilityID > 0) { - - whereClause.append(" AND "); - whereClause.append("SyncFacilityID = ?"); - params.add(syncFacilityID); - - } - - else { - - whereClause.append(" AND "); - whereClause.append("VanID = ?"); - params.add(vanID); - - } - - queryBuilder.append(whereClause); - String query = queryBuilder.toString(); - Object[] queryParams = params.toArray(); - List> resultSet = jdbcTemplate.queryForList(query, queryParams); - if (resultSet != null && resultSet.size() > 0) - return 1; - else - 
return 0; - } - - // Method for synchronization of data to central DB - public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, - List syncDataList) { - jdbcTemplate = getJdbcTemplate(); - if (query.startsWith("INSERT")) { - for (int i = 0; i < syncDataList.size(); i++) { - Object[] array = syncDataList.get(i);// Arrey 1 - - if (query.startsWith("INSERT")) { -// array = new Object[] {serverColumns, array }; - syncDataList.set(i, array); - } - } - } else { - for (int i = 0; i < syncDataList.size(); i++) { - - Object[] array = syncDataList.get(i);// Arrey 1 - String[] columnsArray = null; - if(null != serverColumns) - columnsArray = serverColumns.split(","); // arrey 2 - - List Newarray = new ArrayList<>(); - - int arrayIndex = 0; - int columnsArrayIndex = 0; - //Newarray.add(schema); - //Newarray.add(tableName); - //while (columnsArrayIndex < columnsArray.length || arrayIndex < array.length) { - if (null != columnsArray && columnsArrayIndex < columnsArray.length) { - Newarray.add(columnsArray[columnsArrayIndex]); - columnsArrayIndex++; - } - - /* - * if (arrayIndex < array.length) { Newarray.add(array); arrayIndex++; } - */ - //} - - // Convert Newarray back to an array - //Object[] resultArray = Newarray.toArray(new Object[0]); - syncDataList.set(i, array); - - } - } - // start batch insert/update - int[] i = jdbcTemplate.batchUpdate(query, syncDataList); - return i; - - } - - // End of Data Upload Repository - - public List> getMasterDataFromTable(String schema, String table, String columnNames, - String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { - jdbcTemplate = getJdbcTemplate(); - List> resultSetList =new ArrayList<>(); - String baseQuery = ""; - if (masterType != null) { - if (lastDownloadDate != null) { - if (masterType.equalsIgnoreCase("A")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE LastModDate >= ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate); - - } - else if (masterType.equalsIgnoreCase("V")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE LastModDate >= ? AND VanID = ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,vanID); - } - else if (masterType.equalsIgnoreCase("P")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE LastModDate >= ? AND ProviderServiceMapID = ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,psmID); - } - } else { - if (masterType.equalsIgnoreCase("A")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table; - resultSetList = jdbcTemplate.queryForList(baseQuery); - } - else if (masterType.equalsIgnoreCase("V")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + " WHERE VanID = ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,vanID); - } - else if (masterType.equalsIgnoreCase("P")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE ProviderServiceMapID = ? 
"; - resultSetList = jdbcTemplate.queryForList(baseQuery,psmID); - } - } - } - logger.info("Select query central: " + baseQuery); - logger.info("Last Downloaded Date " + lastDownloadDate); - logger.info("Result set Details: " + resultSetList); - return resultSetList; - } - - // End of Data Download Repository + @Autowired + private DataSource dataSource; + + private JdbcTemplate jdbcTemplate; + + private JdbcTemplate getJdbcTemplate() { + if (this.jdbcTemplate == null) { + this.jdbcTemplate = new JdbcTemplate(dataSource); + } + return this.jdbcTemplate; + } + + private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + + private static final Set VALID_SCHEMAS = Set.of("public", "db_iemr"); + + private static final Set VALID_TABLES = Set.of( + "m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", "i_beneficiarycontacts", + "i_beneficiarydetails", "i_beneficiaryfamilymapping", "i_beneficiaryidentity", "i_beneficiarymapping", + "t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", "t_pnccare", + "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", "t_physicalactivity", + "t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", "t_sys_cardiovascular", + "t_sys_respiratory", "t_sys_centralnervous", "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem", + "t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", "t_benclinicalobservation", + "t_prescription", "t_prescribeddrug", "t_lab_testorder", "t_benreferdetails", + "t_lab_testresult", "t_physicalstockentry", "t_patientissue", "t_facilityconsumption", "t_itemstockentry", + "t_itemstockexit", "t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory", + "t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", "t_cancerobstetrichistory", + "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", "t_canceroralexamination", + "t_cancerbreastexamination", "t_cancerabdominalexamination", "t_cancergynecologicalexamination", + "t_cancerdiagnosis", "t_cancerimageannotation", "i_beneficiaryimage", "t_stockadjustment", + "t_stocktransfer", "t_patientreturn", "t_indent", "t_indentissue", "t_indentorder", "t_saitemmapping" + ); + + private boolean isValidDatabaseIdentifierCharacter(String identifier) { + return identifier != null && identifier.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); + } + + private boolean isValidSchemaName(String schemaName) { + return VALID_SCHEMAS.contains(schemaName.toLowerCase()); + } + + private boolean isValidTableName(String tableName) { + return VALID_TABLES.contains(tableName.toLowerCase()); + } + + private boolean isValidColumnNamesList(String columnNames) { + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + for (String col : columnNames.split(",")) { + if (!isValidDatabaseIdentifierCharacter(col.trim())) { + return false; + } + } + return true; + } + + public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, + String vanAutoIncColumnName, int syncFacilityID) { + jdbcTemplate = getJdbcTemplate(); + List params = new ArrayList<>(); + 
+ if (!isValidSchemaName(schemaName) || !isValidTableName(tableName) || + !isValidDatabaseIdentifierCharacter(vanAutoIncColumnName)) { + logger.error("Invalid identifiers: schema={}, table={}, column={}", schemaName, tableName, vanAutoIncColumnName); + throw new IllegalArgumentException("Invalid identifiers provided."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ") + .append(vanAutoIncColumnName).append(" FROM ") + .append(schemaName).append(".").append(tableName).append(" WHERE VanSerialNo = ?"); + + params.add(vanSerialNo); + + if (List.of("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit").contains(tableName.toLowerCase()) && syncFacilityID > 0) { + queryBuilder.append(" AND SyncFacilityID = ?"); + params.add(syncFacilityID); + } else { + queryBuilder.append(" AND VanID = ?"); + params.add(vanID); + } + + try { + List> resultSet = jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + return (resultSet != null && !resultSet.isEmpty()) ? 1 : 0; + } catch (Exception e) { + logger.error("Error checking record presence: {}", e.getMessage(), e); + throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); + } + } + + public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, + List syncDataList) { + jdbcTemplate = getJdbcTemplate(); + try { + return jdbcTemplate.batchUpdate(query, syncDataList); + } catch (Exception e) { + logger.error("Batch sync failed for table {}: {}", tableName, e.getMessage(), e); + throw new RuntimeException("Batch sync failed: " + e.getMessage(), e); + } + } + + public List> getMasterDataFromTable(String schema, String table, String columnNames, + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) { + jdbcTemplate = getJdbcTemplate(); + List params = new ArrayList<>(); + + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ").append(columnNames) + .append(" FROM ").append(schema).append(".").append(table); + + if (masterType != null) { + if (lastDownloadDate != null) { + queryBuilder.append(" WHERE LastModDate >= ?"); + params.add(lastDownloadDate); + + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND VanID = ?"); + params.add(vanID); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND ProviderServiceMapID = ?"); + params.add(psmID); + } + } else { + queryBuilder.append(" WHERE "); + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append("VanID = ?"); + params.add(vanID); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append("ProviderServiceMapID = ?"); + params.add(psmID); + } + } + } + + try { + // Safe dynamic SQL: All dynamic parts (table names, columns, etc.) are validated or hardcoded. + // Parameter values are bound safely using prepared statement placeholders (?). 
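The batch helper that follows builds on the same idea: the qualified table and column list are whitelisted, while the paging values go through bound parameters (LIMIT ? OFFSET ?). A minimal sketch of such a page fetch with JdbcTemplate, assuming the identifiers have already been validated:

import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

class PagedFetchSketch {
    static List<Map<String, Object>> fetchPage(JdbcTemplate jdbc, String validatedColumns,
            String validatedQualifiedTable, int limit, int offset) {
        // Identifiers are pre-validated; limit and offset are bound, never concatenated.
        String sql = "SELECT " + validatedColumns + " FROM " + validatedQualifiedTable
                + " WHERE Processed <> 'P' AND VanID IS NOT NULL LIMIT ? OFFSET ?";
        return jdbc.queryForList(sql, limit, offset);
    }
}
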
+ return jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + } catch (Exception e) { + logger.error("Error fetching master data: {}", e.getMessage(), e); + throw new RuntimeException("Failed to fetch master data: " + e.getMessage(), e); + } + } + + public List> getBatchForBenDetails(SyncUploadDataDigester digester, + String whereClause, int limit, int offset) { + jdbcTemplate = getJdbcTemplate(); + +String schema = digester.getSchemaName(); + String table = digester.getTableName(); + String columnNames = digester.getServerColumns(); + + + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + // Safe dynamic SQL: Schema, table, and column names are validated against predefined whitelists. + // Only trusted values are used in the query string. + // limit and offset are passed as parameters to prevent SQL injection. + String query = String.format("SELECT %s FROM %s.%s %s LIMIT ? OFFSET ?", columnNames, schema, table, whereClause); //NOSONAR + + try { + return jdbcTemplate.queryForList(query, limit, offset); + } catch (Exception e) { + logger.error("Error fetching batch details: {}", e.getMessage(), e); + throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); + } + } } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index d70404bb..2d88e6f6 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -21,365 +21,625 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; -import java.sql.Timestamp; -import java.text.DateFormat; -import java.text.SimpleDateFormat; import java.time.LocalDateTime; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.HashMap; +import java.util.Set; +import java.util.HashSet; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.fasterxml.jackson.databind.ObjectMapper; import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; -import com.iemr.mmu.utils.mapper.InputMapper; -/*** - * - * @author NE298657 - * - */ @Service public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { - private static final String ServerColumnsNotRequired = null; - @Autowired - private DataSyncRepositoryCentral dataSyncRepositoryCentral; - - public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { - - // feed sync request - ObjectMapper mapper = new ObjectMapper(); - SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); - /* - * SyncUploadDataDigester syncUploadDataDigester = - * InputMapper.gson().fromJson(requestOBJ, SyncUploadDataDigester.class); - */ - String syncTableName = syncUploadDataDigester.getTableName(); - if (syncUploadDataDigester != null && syncTableName != null - && syncTableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { - String s = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); - return s; - } else { - - List> dataToBesync = 
syncUploadDataDigester.getSyncData(); - - Object[] objArr; - - // sync data 'list of object array' - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - - int pointer; - String vanSerialNo; - String vanID; - int recordCheck; - int syncFacilityID = 0; - - for (Map map : dataToBesync) { - pointer = 0; - recordCheck = 0; - vanSerialNo = ""; - vanID = ""; - - vanSerialNo = String.valueOf(map.get(syncUploadDataDigester.getVanAutoIncColumnName())); - vanID = String.valueOf(map.get("VanID")); - - map.replace("SyncedBy", syncUploadDataDigester.getSyncedBy()); - - map.replace("date_format(SyncedDate,'%Y-%m-%d %H:%i:%s')", String.valueOf(LocalDateTime.now())); - - if (syncUploadDataDigester.getFacilityID() != null) { - Double changeDoubleToIntegerID = 0.0; - switch (syncTableName) { - case "t_indent": { - if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("FromFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - case "t_indentorder": { - if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") != null) - changeDoubleToIntegerID = (Double) map.get("FromFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - case "t_indentissue": { - if (map.containsKey("ToFacilityID") && map.get("ToFacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("ToFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - // here a change in rule, will compare with toFacilityID - case "t_stocktransfer": { - if (map.containsKey("TransferToFacilityID") && map.get("TransferToFacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("TransferToFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - case "t_itemstockentry": { - - if (map.containsKey("FacilityID") && map.get("FacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("FacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - default: - - } - - } - - if (map.containsKey("SyncFacilityID")) { - //double syncFaciltyID = (double) map.get("SyncFacilityID"); - syncFacilityID = (int) map.get("SyncFacilityID"); - } - - recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - syncUploadDataDigester.getSchemaName(), syncUploadDataDigester.getTableName(), vanSerialNo, - vanID, syncUploadDataDigester.getVanAutoIncColumnName(), syncFacilityID); - - if (recordCheck == 0) { - objArr = new Object[map.size()]; - } else { - objArr = new Object[map.size() + 2]; - } - - for (Map.Entry entry : map.entrySet()) { - if (entry.getValue() != null) { - if (String.valueOf(entry.getValue()).equalsIgnoreCase("false") - || String.valueOf(entry.getValue()).equalsIgnoreCase("true")) - objArr[pointer] = entry.getValue(); - else - objArr[pointer] = String.valueOf(entry.getValue()); - } else - objArr[pointer] = entry.getValue(); - - pointer++; - } - - if (recordCheck == 0) { - syncDataListInsert.add(objArr); - } else { - - - objArr[pointer] = String.valueOf(vanSerialNo); - - if ((syncTableName.equalsIgnoreCase("t_patientissue") - || syncTableName.equalsIgnoreCase("t_physicalstockentry") - || 
syncTableName.equalsIgnoreCase("t_stockadjustment") - || syncTableName.equalsIgnoreCase("t_saitemmapping") - || syncTableName.equalsIgnoreCase("t_stocktransfer") - || syncTableName.equalsIgnoreCase("t_patientreturn") - || syncTableName.equalsIgnoreCase("t_facilityconsumption") - || syncTableName.equalsIgnoreCase("t_indent") - || syncTableName.equalsIgnoreCase("t_indentorder") - || syncTableName.equalsIgnoreCase("t_indentissue") - || syncTableName.equalsIgnoreCase("t_itemstockentry") - || syncTableName.equalsIgnoreCase("t_itemstockexit")) - && map.containsKey("SyncFacilityID")) { - - objArr[pointer + 1] = String.valueOf(map.get("SyncFacilityID")); - } else - objArr[pointer + 1] = String.valueOf(vanID); - - syncDataListUpdate.add(objArr); - } - - } - - int[] i = null; - if (syncDataListInsert != null && syncDataListInsert.size() > 0) { - // schema name hard coded(Insert query builder) - String queryInsert = getQueryToInsertDataToServerDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(),syncUploadDataDigester.getServerColumns()); - - // call repository to execute the query with given data list(Insert) - i = dataSyncRepositoryCentral.syncDataToCentralDB( - syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), queryInsert, - syncDataListInsert); - } - - int[] j = null; - if (syncDataListUpdate != null && syncDataListUpdate.size() > 0) { - // schema name hard coded(Update query builder) - String queryUpdate = getQueryToUpdateDataToServerDB(syncUploadDataDigester.getSchemaName(), syncUploadDataDigester.getServerColumns(), - syncUploadDataDigester.getTableName()); - - // call repository to execute the query with given data list(Update) - j = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), ServerColumnsNotRequired, queryUpdate, - syncDataListUpdate); - } - - // validating if data sync successfully - if ((i != null && syncDataListInsert.size() != i.length) - || (j != null && syncDataListUpdate.size() != j.length)) - return null; - else - return "data sync passed"; - - } - - } - - public String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( - SyncUploadDataDigester syncUploadDataDigester) { - String returnOBJ = null; - List> dataToBesync = syncUploadDataDigester.getSyncData(); - - Object[] objArr; - // sync data 'list of object array' - List syncData = new ArrayList<>(); - - String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); - - for (Map map : dataToBesync) { - if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { - objArr = new Object[4]; - objArr[0] = String.valueOf(syncUploadDataDigester.getSyncedBy()); - objArr[1] = String.valueOf(map.get("BenRegId")); - objArr[2] = String.valueOf(map.get("BeneficiaryID")); - objArr[3] = String.valueOf(map.get("VanID")); - - syncData.add(objArr); - } - } - int[] i = null; - - if (syncData != null && syncData.size() > 0) { - i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), ServerColumnsNotRequired, query, syncData); - - if (i.length == syncData.size()) { - returnOBJ = "data sync passed"; - } - } else { - returnOBJ = "data sync passed"; - } - - return returnOBJ; - - } - - private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { - - 
StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName+"."+tableName); - queryBuilder.append(" SET "); - queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); - queryBuilder.append(" WHERE "); - queryBuilder.append(" BenRegId = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" BeneficiaryID = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" VanID = ? "); - String query = queryBuilder.toString(); - return query; - } - - public String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - /// StringBuilder updateStatement = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - int index = 0; - for (String column : columnsArr) { - if (index == columnsArr.length - 1) { - preparedStatementSetter.append(" ? "); - - } else { - preparedStatementSetter.append(" ?, "); - - } - index++; - } - } - /* - * String query = "INSERT INTO " + schemaName + "." + tableName + "( " + - * serverColumns + ") VALUES ( " + preparedStatementSetter + " ) "; - */ - - StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); - queryBuilder.append(schemaName + "." + tableName); - queryBuilder.append("("); -// queryBuilder.append("?"); - queryBuilder.append(serverColumns); - queryBuilder.append(") VALUES ("); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(") "); - String query = queryBuilder.toString(); - - return query; - } - - public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - int index = 0; - for (String column : columnsArr) { - if (index == columnsArr.length - 1) { - preparedStatementSetter.append(column); - preparedStatementSetter.append("= ?"); - } else { - preparedStatementSetter.append(column); - preparedStatementSetter.append("= ?, "); - } - index++; - } - } - - if (tableName.equalsIgnoreCase("t_patientissue") || tableName.equalsIgnoreCase("t_physicalstockentry") - || tableName.equalsIgnoreCase("t_stockadjustment") || tableName.equalsIgnoreCase("t_saitemmapping") - || tableName.equalsIgnoreCase("t_stocktransfer") || tableName.equalsIgnoreCase("t_patientreturn") - || tableName.equalsIgnoreCase("t_facilityconsumption") || tableName.equalsIgnoreCase("t_indent") - || tableName.equalsIgnoreCase("t_indentorder") || tableName.equalsIgnoreCase("t_indentissue") - || tableName.equalsIgnoreCase("t_itemstockentry") || tableName.equalsIgnoreCase("t_itemstockexit")) { - - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName+"."+tableName); - queryBuilder.append(" SET "); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(" WHERE "); - queryBuilder.append(" VanSerialNo =? "); - queryBuilder.append(" AND "); - queryBuilder.append(" SyncFacilityID = ? 
"); - String query = queryBuilder.toString(); - return query; - } else { - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName+"."+tableName); - queryBuilder.append(" SET "); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(" WHERE "); - queryBuilder.append(" VanSerialNo =? "); - queryBuilder.append(" AND "); - queryBuilder.append(" VanID = ? "); - String query = queryBuilder.toString(); - return query; - } - - } + private static final String SERVER_COLUMNS_NOT_REQUIRED = null; // Renamed for clarity + private static final Logger logger = LoggerFactory.getLogger(GetDataFromVanAndSyncToDBImpl.class); + + @Autowired + private DataSyncRepositoryCentral dataSyncRepositoryCentral; + + private static final Map> TABLE_GROUPS = new HashMap<>(); + private static final Set VALID_SCHEMAS = new HashSet<>(Arrays.asList("public", "db_iemr")); // Add your actual schema names + private static final Set VALID_TABLES = new HashSet<>(Arrays.asList( + "m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping", + "t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity", + "t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem", + "t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails", + "t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit", + "t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory", + "t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation", + "i_beneficiaryimage", + "t_stockadjustment","t_stocktransfer","t_patientreturn","t_indent","t_indentissue","t_indentorder","t_saitemmapping" + )); + + static { + + TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); + + TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); + + TABLE_GROUPS.put(3, Arrays.asList("t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem")); + + TABLE_GROUPS.put(4, 
Arrays.asList("t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails")); + + TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit")); + + TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory")); + + TABLE_GROUPS.put(7, Arrays.asList("t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation")); + + TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); + + TABLE_GROUPS.put(9, Arrays.asList("t_itemstockentry","t_itemstockexit","t_patientissue","t_physicalstockentry","t_stockadjustment","t_stocktransfer","t_patientreturn","t_facilityconsumption","t_indent","t_indentissue","t_indentorder","t_saitemmapping")); + + } + + public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { + logger.info("Starting syncDataToServer. Token: {}", token); + ObjectMapper mapper = new ObjectMapper(); + SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); + + if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { + logger.error("Invalid SyncUploadDataDigester object or tableName is null."); + return "Error: Invalid sync request."; + } + + String syncTableName = syncUploadDataDigester.getTableName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); + return "Error: Invalid schema or table name."; + } + + + // Handle specific tables first, if their logic is distinct + if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { + String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); + if ("data sync passed".equals(result)) { + return "Sync successful for m_beneficiaryregidmapping."; + } else { + logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); + return "Sync failed for m_beneficiaryregidmapping."; + } + } + if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); + if ("data sync passed".equals(result)) { + return "Sync successful for i_beneficiarydetails."; + } else { + logger.error("Sync failed for i_beneficiarydetails: {}", result); + return "Sync failed for i_beneficiarydetails."; + } + } else { + // Determine the group for the current table or iterate through all if no specific table is given + boolean syncSuccess = true; + String errorMessage = ""; + + // If a specific table is provided in the request, try to find its group and sync only that table. + // Otherwise, iterate through all defined groups. 
+ if (syncTableName != null && !syncTableName.isEmpty()) { + boolean foundInGroup = false; + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { + if (entry.getValue().contains(syncTableName.toLowerCase())) { + logger.info("Attempting to sync table '{}' from Group {}", syncTableName, entry.getKey()); + syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, syncUploadDataDigester); + foundInGroup = true; + break; + } + } + if (!foundInGroup) { + logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", syncTableName); + syncSuccess = performGenericTableSync(syncUploadDataDigester); + } + } else { + // If no specific table is in the request (e.g., a general sync trigger), iterate through groups + logger.info("No specific table provided. Attempting to sync all tables group by group."); + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { + Integer groupId = entry.getKey(); + List tablesInGroup = entry.getValue(); + logger.info("Starting sync for Group {}", groupId); + for (String table : tablesInGroup) { + if (!isValidTableName(table)) { + logger.error("Invalid table name '{}' encountered in group {}. Skipping.", table, groupId); + syncSuccess = false; + errorMessage += "Invalid table name: " + table + " in Group " + groupId + ". "; + continue; // Skip this table + } + + try { + + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); + if (!currentTableSyncResult) { + syncSuccess = false; + errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; + logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); + } else { + logger.info("Successfully synced table: {} in Group {}", table, groupId); + } + } catch (Exception e) { + syncSuccess = false; + errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + e.getMessage() + ". "; + logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); + } + } + } + } + + if (syncSuccess) { + logger.info("Overall data sync passed."); + return "Overall data sync passed."; + } else { + logger.info("Overall data sync failed. Details: " + errorMessage); + return "Overall data sync failed. 
Details: " + errorMessage; + } + } + } + + + private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { + logger.info("Attempting generic sync for table: {}", currentTableName); + + // Validate schemaName and currentTableName for safety before proceeding + if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { + logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); + return false; // Fail fast if identifiers are invalid + } + + SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); + tableSpecificDigester.setSchemaName(schemaName); + tableSpecificDigester.setTableName(currentTableName); + tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); + tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); + tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is generic or set per table + + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching + + return performGenericTableSync(tableSpecificDigester); + } + + + private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUploadDataDigester syncUploadDataDigester) { + logger.info("Processing update_M_BeneficiaryRegIdMapping_for_provisioned_benID for table: {}", syncUploadDataDigester.getTableName()); + + String schemaName = syncUploadDataDigester.getSchemaName(); + String tableName = syncUploadDataDigester.getTableName(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); + return "Error: Invalid schema or table name."; + } + + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncData = new ArrayList<>(); + + String query = String.format("UPDATE %s.%s SET Provisioned = true, SyncedDate = now(), SyncedBy = ? WHERE BenRegId = ? AND BeneficiaryID = ? AND VanID = ?", schemaName, tableName); + + for (Map map : dataToBesync) { + if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { + Object[] objArr = new Object[4]; + objArr[0] = syncUploadDataDigester.getSyncedBy(); // SyncedBy + objArr[1] = String.valueOf(map.get("BenRegId")); + objArr[2] = String.valueOf(map.get("BeneficiaryID")); + objArr[3] = String.valueOf(map.get("VanID")); + syncData.add(objArr); + } else { + logger.warn("Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", map); + } + } + + if (!syncData.isEmpty()) { + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, + tableName, SERVER_COLUMNS_NOT_REQUIRED, query, syncData); + + if (i.length == syncData.size()) { + logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); + return "data sync passed"; + } else { + logger.error("Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. 
Failed records: {}", syncData.size(), i.length, getFailedRecords(i, syncData)); + return "Partial data sync for m_beneficiaryregidmapping."; + } + } catch (Exception e) { + logger.error("Exception during update for m_beneficiaryregidmapping: {}", e.getMessage(), e); + return "Error during sync for m_beneficiaryregidmapping: " + e.getMessage(); + } + } else { + logger.info("No data to sync for m_beneficiaryregidmapping."); + return "data sync passed"; + } + } + + + public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { + logger.info("Processing update_I_BeneficiaryDetails_for_processed_in_batches for table: {}", syncUploadDataDigester.getTableName()); + String schemaName = syncUploadDataDigester.getSchemaName(); + String tableName = syncUploadDataDigester.getTableName(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); + return "Error: Invalid schema or table name."; + } + + List syncData = new ArrayList<>(); // This list will hold data for batch updates to 'Processed' + + String updateQuery = getQueryFor_I_BeneficiaryDetails(schemaName, tableName); + + int limit = 1000; + int offset = 0; + int totalProcessed = 0; + + String whereClauseForBatchFetch = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // This is for fetching, not for update + + while (true) { + List> batchToFetch; + try { + batchToFetch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + whereClauseForBatchFetch, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); + } + + if (batchToFetch.isEmpty()) { + break; + } + + for (Map map : batchToFetch) { + if (map.get("BeneficiaryDetailsId") != null && map.get("VanID") != null) { + Object[] params = new Object[3]; + params[0] = syncUploadDataDigester.getSyncedBy(); + params[1] = String.valueOf(map.get("BeneficiaryDetailsId")); + params[2] = String.valueOf(map.get("VanID")); + syncData.add(params); + } else { + logger.warn("Skipping record in i_beneficiarydetails due to missing BeneficiaryDetailsId or VanID: {}", map); + } + } + + if (!syncData.isEmpty()) { + try { + int[] batchUpdateResults = dataSyncRepositoryCentral.syncDataToCentralDB( + schemaName, + tableName, + SERVER_COLUMNS_NOT_REQUIRED, + updateQuery, + syncData); + + int successfulUpdates = 0; + for (int result : batchUpdateResults) { + if (result >= 1) { + successfulUpdates++; + } + } + totalProcessed += successfulUpdates; + logger.info("Batch update for i_beneficiarydetails: {} records processed, {} successfully updated.", syncData.size(), successfulUpdates); + + syncData.clear(); + offset += limit; + + } catch (Exception e) { + logger.error("Exception during batch update for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error during sync for i_beneficiarydetails: " + e.getMessage(); + } + } else { + logger.info("No valid records in the current batch for i_beneficiarydetails to update."); + offset += limit; + } + } + + if (totalProcessed > 0) { + logger.info("Finished processing i_beneficiarydetails. 
Total records processed: {}", totalProcessed); + return "data sync passed"; + } else { + logger.info("No records were processed for i_beneficiarydetails."); + return "No data processed for i_beneficiarydetails."; + } + } + + private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); + throw new IllegalArgumentException("Invalid schema or table name provided."); + } + return String.format("UPDATE %s.%s SET Processed = 'P', SyncedDate = now(), SyncedBy = ? WHERE BeneficiaryDetailsId = ? AND VanID = ?", schemaName, tableName); + } + + + + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { + logger.info("Performing generic sync for table: {}", syncUploadDataDigester.getTableName()); + + String schemaName = syncUploadDataDigester.getSchemaName(); + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String serverColumns = syncUploadDataDigester.getServerColumns(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); + return false; + } + + if (!isValidColumnNames(serverColumns)) { + logger.error("Invalid server columns provided for generic sync: {}", serverColumns); + return false; + } + + + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); + + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; // Nothing to sync, consider it a success + } + + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + + for (Map map : dataToBesync) { + String vanSerialNo = String.valueOf(map.get(vanAutoIncColumnName)); + String vanID = String.valueOf(map.get("VanID")); + int syncFacilityID = 0; + + map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB + + if (facilityIDFromDigester != null) { + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") instanceof Double) { + Double fromFacilityID = (Double) map.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + case "t_indentissue": { + if (map.containsKey("ToFacilityID") && map.get("ToFacilityID") instanceof Double) { + Double toFacilityID = (Double) map.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + case "t_stocktransfer": { + if (map.containsKey("TransferToFacilityID") && map.get("TransferToFacilityID") instanceof Double) { + Double transferToFacilityID = (Double) map.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + case "t_itemstockentry": { + if (map.containsKey("FacilityID") && map.get("FacilityID") instanceof Double) { + Double mapFacilityID = (Double) map.get("FacilityID"); + if 
(mapFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + default: + // No specific facility ID logic for other tables, maintain existing 'Processed' status or default + break; + } + } + + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Integer) { + syncFacilityID = (Integer) map.get("SyncFacilityID"); + } else if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Double) { + syncFacilityID = ((Double) map.get("SyncFacilityID")).intValue(); + } + + + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); + return false; // Critical error, stop sync for this table + } + + // Prepare Object array for insert/update + Object[] objArr; + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + List currentRecordValues = new ArrayList<>(); + + for (String column : serverColumnsList) { + Object value = map.get(column.trim()); + // Handle boolean conversion if necessary, though String.valueOf should generally work for prepared statements + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); + } + } + + objArr = currentRecordValues.toArray(); + + if (recordCheck == 0) { + syncDataListInsert.add(objArr); + } else { + // For update, append the WHERE clause parameters at the end of the array + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && map.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(map.get("SyncFacilityID"))); + } else { + updateParams.add(String.valueOf(vanID)); + } + syncDataListUpdate.add(updateParams.toArray()); + } + } + + boolean insertSuccess = true; + boolean updateSuccess = true; + + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + if (i.length != syncDataListInsert.size()) { + insertSuccess = false; + logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", syncTableName, syncDataListInsert.size(), i.length, getFailedRecords(i, syncDataListInsert)); + } else { + logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); + } + } catch (Exception e) { + insertSuccess = false; + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + } + } + + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + try { + int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + if (j.length != syncDataListUpdate.size()) { + updateSuccess = false; + logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", syncTableName, syncDataListUpdate.size(), j.length, getFailedRecords(j, syncDataListUpdate)); + } else { + logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); + } + } catch (Exception e) { + updateSuccess = false; + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + } + } + return insertSuccess && updateSuccess; + } + + private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + throw new IllegalArgumentException("Invalid schema or table name provided."); + } + if (!isValidColumnNames(serverColumns)) { + logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); + throw new IllegalArgumentException("Invalid column names provided."); + } + + + String[] columnsArr = serverColumns.split(","); + StringBuilder preparedStatementSetter = new StringBuilder(); + + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + + return String.format("INSERT INTO %s.%s(%s) VALUES (%s)", schemaName, tableName, serverColumns, preparedStatementSetter.toString()); + } + + public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + throw new IllegalArgumentException("Invalid schema or table name provided."); + } + if (!isValidColumnNames(serverColumns)) { + logger.error("Invalid server columns provided for getQueryToUpdateDataToServerDB: {}", serverColumns); + throw new IllegalArgumentException("Invalid column names provided."); + } + + String[] columnsArr = serverColumns.split(","); + StringBuilder preparedStatementSetter = new StringBuilder(); + + for (int i = 0; i < columnsArr.length; i++) { + String column = columnsArr[i].trim(); + if (!isValidColumnName(column)) { + logger.error("Invalid individual column name encountered: {}", column); + throw new IllegalArgumentException("Invalid individual column name provided: " + column); + } + + preparedStatementSetter.append(column); + preparedStatementSetter.append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + + StringBuilder queryBuilder = new StringBuilder(); + 
queryBuilder.append(String.format("UPDATE %s.%s SET %s WHERE VanSerialNo = ?", schemaName, tableName, preparedStatementSetter.toString())); + + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(tableName.toLowerCase())) { + queryBuilder.append(" AND SyncFacilityID = ? "); + } else { + queryBuilder.append(" AND VanID = ? "); + } + return queryBuilder.toString(); + } + + private boolean isValidSchemaName(String schemaName) { + return VALID_SCHEMAS.contains(schemaName.toLowerCase()); + } + + private boolean isValidTableName(String tableName) { + return VALID_TABLES.contains(tableName.toLowerCase()); + } + + private boolean isValidColumnName(String columnName) { + return columnName != null && columnName.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); + } + + private boolean isValidColumnNames(String columnNames) { + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + String[] cols = columnNames.split(","); + for (String col : cols) { + if (!isValidColumnName(col.trim())) { + return false; + } + } + return true; + } + + + private String getFailedRecords(int[] results, List data) { + List failedRecordsInfo = new ArrayList<>(); + for (int k = 0; k < results.length; k++) { + if (results[k] < 1) { + String idInfo = "N/A"; + if (data.get(k) != null && data.get(k).length > 0) { + idInfo = "Record data size: " + data.get(k).length; + } + failedRecordsInfo.add("Record at index " + k + " (Info: " + idInfo + ")"); + } + } + return String.join("; ", failedRecordsInfo); + } } \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java index cf07391c..2cbab41d 100644 --- a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java +++ b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java @@ -1,3 +1,24 @@ +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. 
+*/ package com.iemr.mmu.utils; import org.slf4j.Logger; @@ -23,16 +44,19 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + authorization); } - if (jwtToken == null || jwtToken.isEmpty()) { - ServletRequestAttributes attrs = - (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); - if (attrs != null) { - HttpServletRequest request = attrs.getRequest(); - try { - jwtToken = CookieUtil.getJwtTokenFromCookie(request); - } catch (Exception e) { - logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); - } + ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + + if ((jwtToken == null || jwtToken.isEmpty()) && attrs != null) { + HttpServletRequest request = attrs.getRequest(); + try { + jwtToken = CookieUtil.getJwtTokenFromCookie(request); + } catch (Exception e) { + logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); + } + + String jwtTokenHeader = request.getHeader("JwtToken"); + if (jwtTokenHeader != null && !jwtTokenHeader.isEmpty()) { + jwtToken = jwtTokenHeader; } } @@ -41,7 +65,6 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken); } - return new HttpEntity<>(body, headers); } } \ No newline at end of file diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index d172f4aa..2e6ce84f 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -7,6 +7,8 @@ spring.datasource.tomcat.remove-abandoned=true spring.datasource.tomcat.remove-abandoned-timeout=1800 spring.datasource.tomcat.logAbandoned=true spring.datasource.continue-on-error=true +spring.datasource.tomcat.max-wait=60000 + ## below line added by neeraj for reset abandoned DB connection from connection pool spring.datasource.tomcat.jdbc-interceptors=ResetAbandonedTimer @@ -46,3 +48,8 @@ logging.level.org.springframework=INFO spring.main.allow-circular-references=true spring.main.allow-bean-definition-overriding=true + +spring.datasource.tomcat.testOnBorrow=true +spring.datasource.tomcat.validationQuery=SELECT 1 +spring.datasource.tomcat.validationInterval=30000 # 30 sec +logging.level.org.apache.tomcat.jdbc.pool=DEBUG From 2b7f4c3a8b74a64f1c18469cbbf2af4e12451967 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 30 Jul 2025 16:41:17 +0530 Subject: [PATCH 07/23] fix: add functionality to save the file ID's uploaded from doctor screen (#99) --- .../ncdscreening/NCDScreeningServiceImpl.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java b/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java index a395eec1..9f1018a7 100644 --- a/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java @@ -1182,6 +1182,30 @@ public Long saveDoctorData(JsonObject requestOBJ, String Authorization) throws E TcSpecialistSlotBookingRequestOBJ tcSpecialistSlotBookingRequestOBJ = null; CommonUtilityClass commonUtilityClass = InputMapper.gson().fromJson(requestOBJ, CommonUtilityClass.class); + if (requestOBJ.has("visitDetails") && !requestOBJ.get("visitDetails").isJsonNull()) { + JsonObject visitWrapperObj = 
requestOBJ.getAsJsonObject("visitDetails"); + JsonObject visitDetailsObj = visitWrapperObj.getAsJsonObject("visitDetails"); + + if (visitDetailsObj.has("fileIDs") && visitDetailsObj.get("fileIDs").isJsonArray()) { + JsonArray fileIDs = visitDetailsObj.getAsJsonArray("fileIDs"); + StringBuilder fileIDBuilder = new StringBuilder(); + for (JsonElement fileIdElement : fileIDs) { + if (!fileIdElement.isJsonNull()) { + fileIDBuilder.append(fileIdElement.getAsString()).append(","); + } + } + + if (fileIDBuilder.length() > 0) { + fileIDBuilder.setLength(fileIDBuilder.length() - 1); + + benVisitDetailRepo.updateFileID( + fileIDBuilder.toString(), + commonUtilityClass.getBeneficiaryRegID(), + commonUtilityClass.getVisitCode() + ); + } + } +} if (commonUtilityClass != null && commonUtilityClass.getServiceID() != null && commonUtilityClass.getServiceID() == 4 && requestOBJ != null && requestOBJ.has("tcRequest") && requestOBJ.get("tcRequest") != null) { From 1aadead0c331f5c0c25f4a718ed72f425dc13bf6 Mon Sep 17 00:00:00 2001 From: 5Amogh Date: Thu, 31 Jul 2025 16:14:58 +0530 Subject: [PATCH 08/23] story: amm-1668 task - 1754 --- .../iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java b/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java index d996f43e..d4adc10e 100644 --- a/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java +++ b/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java @@ -75,7 +75,7 @@ public int updateBenFlowStatusTMReferred(@Param("benFlowID") Long benFlowID, @Pa @Query("SELECT t.benFlowID, t.beneficiaryRegID, t.visitDate, t.benName, t.age, t.ben_age_val, t.genderID, t.genderName, " + " t.villageName, t.districtName, t.beneficiaryID, t.servicePointName, t.VisitReason, t.VisitCategory, t.benVisitID, " - + " t.registrationDate, t.benVisitDate, t.visitCode, t.consultationDate FROM BeneficiaryFlowStatus t " + + " t.registrationDate, t.benVisitDate, t.visitCode, t.consultationDate, t.fatherName, t.preferredPhoneNum FROM BeneficiaryFlowStatus t " + " Where t.beneficiaryRegID = :benRegID AND t.benFlowID = :benFlowID ") public ArrayList getBenDetailsForLeftSidePanel(@Param("benRegID") Long benRegID, @Param("benFlowID") Long benFlowID); From ea849644e4f6f47a582939cd33f49a5c3a005129 Mon Sep 17 00:00:00 2001 From: Amoghavarsh <93114621+5Amogh@users.noreply.github.com> Date: Fri, 1 Aug 2025 11:55:47 +0530 Subject: [PATCH 09/23] story: amm-1754 updated response including father name and phone no of the beneficiary (#102) * fix: amm-1754 changing the query to get the expected response similar to hwc * fix: amm-1754 compilation error fix * fix: amm-1754 argument issue fix * fix: amm-1754 argument issue fix * fix: amm-1754 argument issue fix --- .../iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java b/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java index 4e32b433..0cce7849 100644 --- a/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java +++ b/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java @@ -324,7 +324,7 @@ public BeneficiaryFlowStatus() { public BeneficiaryFlowStatus(Long benFlowID, Long benRegID, Timestamp visitDate, String benName, String age, Integer ageVal, 
Short genderID, String genderName, String villageName, String districtName, Long beneficiaryID, String servicePoint, String VisitReason, String VisitCategory, Long benVisitID, - Timestamp regDate, Timestamp benVisitDate, Long visitCode, Timestamp consultationDate) { + Timestamp regDate, Timestamp benVisitDate, Long visitCode, Timestamp consultationDate, String fatherName, String preferredPhoneNum) { this.benFlowID = benFlowID; this.beneficiaryRegID = benRegID; this.serviceDate = benVisitDate; @@ -344,6 +344,8 @@ public BeneficiaryFlowStatus(Long benFlowID, Long benRegID, Timestamp visitDate, this.visitCode = visitCode; this.consultationDate = consultationDate; this.bloodGroup = null; + this.fatherName = fatherName; + this.preferredPhoneNum = preferredPhoneNum; } @@ -366,7 +368,8 @@ public static BeneficiaryFlowStatus getBeneficiaryFlowStatusForLeftPanel(ArrayLi (String) objArr[3], (String) objArr[4], (Integer) objArr[5], (Short) objArr[6], (String) objArr[7], (String) objArr[8], (String) objArr[9], (Long) objArr[10], (String) objArr[11], (String) objArr[12], (String) objArr[13], (Long) objArr[14], - (Timestamp) objArr[15], (Timestamp) objArr[16], (Long) objArr[17], (Timestamp) objArr[18]); + (Timestamp) objArr[15], (Timestamp) objArr[16], (Long) objArr[17], (Timestamp) objArr[18], + (String) objArr[19], (String) objArr[20]); } } return obj; From e1d6ece47f903b72b822f6b9699f95a29684982e Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 1 Aug 2025 14:49:24 +0530 Subject: [PATCH 10/23] Save the files uploaded from Doctor Screen (#100) * fix: add file path in cancer gynecological examination * fix: save the files uploaded from the doctor portal * fix: get file names in the response of gynecological examination --- .../CancerGynecologicalExamination.java | 18 ++++++++ .../cancerScreening/CSNurseServiceImpl.java | 43 +++++++++++++++++-- .../cancerScreening/CSServiceImpl.java | 5 +++ 3 files changed, 62 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java index a63bbc9b..4e96ba30 100644 --- a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java +++ b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java @@ -22,7 +22,9 @@ package com.iemr.mmu.data.doctor; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.List; +import java.util.Map; import jakarta.persistence.Column; import jakarta.persistence.Entity; @@ -37,6 +39,8 @@ import com.google.gson.annotations.Expose; import lombok.Data; +import lombok.Data; + @Entity @Data @Table(name = "t_cancergynecologicalexamination") @@ -104,6 +108,10 @@ public class CancerGynecologicalExamination { @Column(name = "FilePath") private String filePath; + @Expose + @Transient + private ArrayList> files; + @Expose @Column(name = "ExperiencedPostCoitalBleeding") private Boolean experiencedPostCoitalBleeding; @@ -430,4 +438,14 @@ public void setVisitCode(Long visitCode) { this.visitCode = visitCode; } + + public ArrayList> getFiles() { + return files; + } + + public void setFiles(ArrayList> files) { + this.files = files; + } + + } diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java index b6b15f22..77364bbd 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java +++ 
b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java @@ -63,10 +63,14 @@ import com.iemr.mmu.repo.nurse.BenPersonalCancerDietHistoryRepo; import com.iemr.mmu.repo.nurse.BenPersonalCancerHistoryRepo; import com.iemr.mmu.repo.nurse.BenVisitDetailRepo; +import com.iemr.mmu.utils.AESEncryption.AESEncryptionDecryption; @Service public class CSNurseServiceImpl implements CSNurseService { private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + + @Autowired + private AESEncryptionDecryption aESEncryptionDecryption; private BenFamilyCancerHistoryRepo benFamilyCancerHistoryRepo; private BenPersonalCancerHistoryRepo benPersonalCancerHistoryRepo; private BenPersonalCancerDietHistoryRepo benPersonalCancerDietHistoryRepo; @@ -593,10 +597,41 @@ public CancerBreastExamination getBenCancerBreastExaminationData(Long benRegID, } public CancerGynecologicalExamination getBenCancerGynecologicalExaminationData(Long benRegID, Long visitCode) { - CancerGynecologicalExamination cancerGynecologicalExamination = cancerGynecologicalExaminationRepo - .getBenCancerGynecologicalExaminationDetails(benRegID, visitCode); - return cancerGynecologicalExamination; - } + CancerGynecologicalExamination cancerGynecologicalExamination = cancerGynecologicalExaminationRepo + .getBenCancerGynecologicalExaminationDetails(benRegID, visitCode); + + if (cancerGynecologicalExamination != null) { + + String filePathStr = cancerGynecologicalExamination.getFilePath(); + + if (filePathStr != null && !filePathStr.trim().isEmpty()) { + ArrayList> fileList = new ArrayList<>(); + String[] fileIds = filePathStr.split(","); + + for (String str : fileIds) { + if (str != null && !str.trim().isEmpty()) { + try { + String decryptedFilePath = aESEncryptionDecryption.decrypt(str); // Decrypt + String[] tempArr = decryptedFilePath.split("/"); + String fileName = tempArr[tempArr.length - 1]; + + Map fileMap = new HashMap<>(); + fileMap.put("filePath", str); + fileMap.put("fileName", fileName); + + fileList.add(fileMap); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + cancerGynecologicalExamination.setFiles(fileList); + } + } + + return cancerGynecologicalExamination; +} public CancerSignAndSymptoms getBenCancerSignAndSymptomsData(Long benRegID, Long visitCode) { CancerSignAndSymptoms cancerSignAndSymptoms = cancerSignAndSymptomsRepo diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java index 9a97fbcb..23fdf9d8 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java @@ -595,6 +595,11 @@ public int updateBenExaminationDetail(JsonObject jsnOBJ) throws Exception { CancerGynecologicalExamination cancerGynecologicalExamination = InputMapper.gson() .fromJson(jsnOBJ.get("gynecologicalDetails"), CancerGynecologicalExamination.class); + if (cancerGynecologicalExamination.getFileIDs() != null) { + cancerGynecologicalExamination.setFilePath( + String.join(",", cancerGynecologicalExamination.getFileIDs())); + } + int ID = cSNurseServiceImpl.updateCancerGynecologicalExaminationDetails(cancerGynecologicalExamination); if (ID > 0) { // gynecologicalDetails stored successfully... 
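[Editor's sketch, not part of the patch series] The save/fetch round trip in the patch above amounts to joining the uploaded file IDs into a single comma-separated FilePath value on write and, on read, splitting that value, decrypting each token, and using the last path segment as the display name. The following minimal, self-contained Java sketch mirrors that flow; the class name, the helper names joinFileIds/toFileList, and the decrypt stub are hypothetical stand-ins (the real project uses its AESEncryptionDecryption bean, whose decrypt method is assumed here to return the plain file path).

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FilePathRoundTripSketch {

    // Placeholder for AESEncryptionDecryption.decrypt(...); here it simply echoes its input.
    static String decrypt(String encrypted) {
        return encrypted; // assumption: the real bean returns the decrypted absolute path
    }

    // Save side: collapse the uploaded file IDs into the single FilePath column value.
    static String joinFileIds(List<String> fileIds) {
        return String.join(",", fileIds);
    }

    // Fetch side: rebuild the {filePath, fileName} list returned to the doctor screen.
    static List<Map<String, String>> toFileList(String filePathColumn) {
        List<Map<String, String>> files = new ArrayList<>();
        if (filePathColumn == null || filePathColumn.trim().isEmpty()) {
            return files;
        }
        for (String token : filePathColumn.split(",")) {
            if (token == null || token.trim().isEmpty()) {
                continue; // skip empty tokens left by trailing commas
            }
            String decrypted = decrypt(token.trim());
            String[] segments = decrypted.split("/");
            Map<String, String> file = new HashMap<>();
            file.put("filePath", token.trim());                  // encrypted ID is passed back as-is
            file.put("fileName", segments[segments.length - 1]); // display name = last path segment
            files.add(file);
        }
        return files;
    }

    public static void main(String[] args) {
        String stored = joinFileIds(List.of("id-1", "id-2"));
        System.out.println(toFileList(stored));
    }
}

Usage note: because the stored value is a plain comma-joined string, any token that itself decrypts to a path containing commas would break the split; the sketch assumes the encrypted IDs never contain commas, which matches how the patch builds the value.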
From 690e358b5c49560f60b8b4803fe63a9ca9784301 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 5 Aug 2025 14:29:45 +0530 Subject: [PATCH 11/23] fix: cherry-pick the commits from develop --- .../common/transaction/CommonServiceImpl.java | 2 +- .../com/iemr/mmu/utils/RestTemplateUtil.java | 24 +++++++++---------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java index 3c962bc1..bd4f7cc0 100644 --- a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java @@ -650,7 +650,7 @@ public ArrayList getTmCaseSheet(BeneficiaryFlowStatus TmBenFlowOBJ, Bene headers.add("Cookie", "Jwttoken=" + jwtTokenFromCookie); // get TM case sheet by passing TM details - ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj), jwtTokenFromCookie); + ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj)); if (response.getStatusCodeValue() == 200 & response.hasBody()) { JsonObject jsnOBJ = getJsonObj(response); diff --git a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java index 2cbab41d..d1ea0efd 100644 --- a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java +++ b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java @@ -44,19 +44,16 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + authorization); } - ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); - - if ((jwtToken == null || jwtToken.isEmpty()) && attrs != null) { - HttpServletRequest request = attrs.getRequest(); - try { - jwtToken = CookieUtil.getJwtTokenFromCookie(request); - } catch (Exception e) { - logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); - } - - String jwtTokenHeader = request.getHeader("JwtToken"); - if (jwtTokenHeader != null && !jwtTokenHeader.isEmpty()) { - jwtToken = jwtTokenHeader; + if (jwtToken == null || jwtToken.isEmpty()) { + ServletRequestAttributes attrs = + (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + if (attrs != null) { + HttpServletRequest request = attrs.getRequest(); + try { + jwtToken = CookieUtil.getJwtTokenFromCookie(request); + } catch (Exception e) { + logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); + } } } @@ -65,6 +62,7 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken); } + return new HttpEntity<>(body, headers); } } \ No newline at end of file From b384bc5c2f530368c7342ba07f56422b049180bf Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 1 Jul 2025 14:00:33 +0530 Subject: [PATCH 12/23] fix: cherry-pick commits from develop --- .../common/transaction/CommonServiceImpl.java | 2 +- ...wnloadDataFromServerTransactionalImpl.java | 20 +++++++++---------- .../GetMasterDataFromCentralForVanImpl.java | 5 +++++ 3 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java index bd4f7cc0..3c962bc1 100644 --- 
a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java @@ -650,7 +650,7 @@ public ArrayList getTmCaseSheet(BeneficiaryFlowStatus TmBenFlowOBJ, Bene headers.add("Cookie", "Jwttoken=" + jwtTokenFromCookie); // get TM case sheet by passing TM details - ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj)); + ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj), jwtTokenFromCookie); if (response.getStatusCodeValue() == 200 & response.hasBody()) { JsonObject jsnOBJ = getJsonObj(response); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java index ee3c39a5..d2441802 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java @@ -87,7 +87,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri for (int i = 0; i < 5; i++) { switch (i) { case 0: { - obj = downloadDataFromCentral("db_iemr", "t_indent", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_indent", vanID, ServerAuthorization, token); List ids = new ArrayList(); Indent[] indentArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), Indent[].class, 1); List indentList = Arrays.asList(indentArr); @@ -109,13 +109,13 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } indentRepo.saveAll(indentList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_indent", ids, ServerAuthorization, token); + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_indent", ids, ServerAuthorization, token); } break; } case 1: { - obj = downloadDataFromCentral("db_iemr", "t_indentorder", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_indentorder", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentOrder[] indentOrderArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentOrder[].class, 1); @@ -136,12 +136,12 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri indentOrder.setProcessed("P"); } indentOrderRepo.saveAll(indentOrderList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_indentorder", ids, ServerAuthorization, token); + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_indentorder", ids, ServerAuthorization, token); } break; } case 2: { - obj = downloadDataFromCentral("db_iemr", "t_indentissue", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_indentissue", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentIssue[] indentIssueArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentIssue[].class, 1); @@ -164,12 +164,12 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } indentIssueRepo.saveAll(indentIssueList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_indentissue", ids, ServerAuthorization, token); + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_indentissue", ids, ServerAuthorization, token); } break; } case 3: { - obj = 
downloadDataFromCentral("db_iemr", "t_stocktransfer", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_stocktransfer", vanID, ServerAuthorization, token); List ids = new ArrayList(); T_StockTransfer[] stockTransferArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), T_StockTransfer[].class, 1); @@ -191,13 +191,13 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } stockTransferRepo.saveAll(stockTransferList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_stocktransfer", ids, + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_stocktransfer", ids, ServerAuthorization, token); } break; } case 4: { - obj = downloadDataFromCentral("db_iemr", "t_itemstockentry", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_itemstockentry", vanID, ServerAuthorization, token); List ids = new ArrayList(); ItemStockEntry[] itemStockEntryArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), ItemStockEntry[].class, 1); @@ -219,7 +219,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } itemStockEntryRepo.saveAll(itemStockEntryList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_itemstockentry", ids, + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_itemstockentry", ids, ServerAuthorization, token); } break; diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java index eaae79c3..6142d257 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java @@ -27,7 +27,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.iemr.mmu.data.syncActivity_syncLayer.SyncDownloadMaster; @@ -36,6 +39,7 @@ public class GetMasterDataFromCentralForVanImpl implements GetMasterDataFromCentralForVan { @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { List> resultSetList = new ArrayList<>(); @@ -56,6 +60,7 @@ public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { } private List> getMasterDataFromGivenTable(SyncDownloadMaster tableDetails) throws Exception { + logger.info("ger master data="+ tableDetails.getSchemaName()); List> resultSetList = new ArrayList<>(); resultSetList = dataSyncRepositoryCentral.getMasterDataFromTable(tableDetails.getSchemaName(), tableDetails.getTableName(), tableDetails.getServerColumnName(), tableDetails.getMasterType(), From 9571a35f77a36b578e41b0b50ff452a839ffb0fe Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Tue, 5 Aug 2025 19:03:02 +0530 Subject: [PATCH 13/23] Fix the Download Masters issue (#103) * fix: resolve the conflicts * fix: fix the issue in download masters table --- ...wnloadDataFromServerTransactionalImpl.java | 10 +- .../DataSyncRepositoryCentralDownload.java | 208 ++++++++++++++++++ 
.../GetMasterDataFromCentralForVanImpl.java | 6 +- 3 files changed, 218 insertions(+), 6 deletions(-) create mode 100644 src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java index d2441802..5cd8d26d 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java @@ -87,7 +87,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri for (int i = 0; i < 5; i++) { switch (i) { case 0: { - obj = downloadDataFromCentral("db_iemr_sync", "t_indent", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_indent", vanID, ServerAuthorization, token); List ids = new ArrayList(); Indent[] indentArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), Indent[].class, 1); List indentList = Arrays.asList(indentArr); @@ -115,7 +115,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 1: { - obj = downloadDataFromCentral("db_iemr_sync", "t_indentorder", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_indentorder", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentOrder[] indentOrderArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentOrder[].class, 1); @@ -141,7 +141,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 2: { - obj = downloadDataFromCentral("db_iemr_sync", "t_indentissue", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_indentissue", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentIssue[] indentIssueArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentIssue[].class, 1); @@ -169,7 +169,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 3: { - obj = downloadDataFromCentral("db_iemr_sync", "t_stocktransfer", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_stocktransfer", vanID, ServerAuthorization, token); List ids = new ArrayList(); T_StockTransfer[] stockTransferArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), T_StockTransfer[].class, 1); @@ -197,7 +197,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 4: { - obj = downloadDataFromCentral("db_iemr_sync", "t_itemstockentry", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_itemstockentry", vanID, ServerAuthorization, token); List ids = new ArrayList(); ItemStockEntry[] itemStockEntryArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), ItemStockEntry[].class, 1); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java new file mode 100644 index 00000000..565e2466 --- /dev/null +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java @@ -0,0 +1,208 @@ +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health 
Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. +*/ +package com.iemr.mmu.service.dataSyncLayerCentral; + +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.stereotype.Service; + +/*** + * + * @author NE298657 + * + */ + +@Service +public class DataSyncRepositoryCentralDownload { + @Autowired + private DataSource dataSource; + + private JdbcTemplate jdbcTemplate; + + private JdbcTemplate getJdbcTemplate() { + return new JdbcTemplate(dataSource); + + } + + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + + // Data Upload Repository + public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, + String vanAutoIncColumnName, int syncFacilityID) { + jdbcTemplate = getJdbcTemplate(); + + List params = new ArrayList<>(); + + StringBuilder queryBuilder = new StringBuilder("SELECT "); + queryBuilder.append(vanAutoIncColumnName); + queryBuilder.append(" FROM "); + queryBuilder.append(schemaName+"."+tableName); + + //params.add(vanAutoIncColumnName); + //params.add(schemaName); + //params.add(tableName); + + StringBuilder whereClause = new StringBuilder(); + whereClause.append(" WHERE "); + whereClause.append("VanSerialNo = ?"); + params.add(vanSerialNo); + + if ((tableName.equalsIgnoreCase("t_patientissue") || tableName.equalsIgnoreCase("t_physicalstockentry") + || tableName.equalsIgnoreCase("t_stockadjustment") || tableName.equalsIgnoreCase("t_saitemmapping") + || tableName.equalsIgnoreCase("t_stocktransfer") || tableName.equalsIgnoreCase("t_patientreturn") + || tableName.equalsIgnoreCase("t_facilityconsumption") || tableName.equalsIgnoreCase("t_indent") + || tableName.equalsIgnoreCase("t_indentorder") || tableName.equalsIgnoreCase("t_indentissue") + || tableName.equalsIgnoreCase("t_itemstockentry") || tableName.equalsIgnoreCase("t_itemstockexit")) + && syncFacilityID > 0) { + + whereClause.append(" AND "); + whereClause.append("SyncFacilityID = ?"); + params.add(syncFacilityID); + + } + + else { + + whereClause.append(" AND "); + whereClause.append("VanID = ?"); + params.add(vanID); + + } + + queryBuilder.append(whereClause); + String query = queryBuilder.toString(); + Object[] queryParams = params.toArray(); + List> resultSet = jdbcTemplate.queryForList(query, queryParams); + if (resultSet != null && resultSet.size() > 0) + return 1; + else + return 0; + } + + // Method for synchronization of data to central DB + public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, 
+ List syncDataList) { + jdbcTemplate = getJdbcTemplate(); + if (query.startsWith("INSERT")) { + for (int i = 0; i < syncDataList.size(); i++) { + Object[] array = syncDataList.get(i);// Arrey 1 + + if (query.startsWith("INSERT")) { +// array = new Object[] {serverColumns, array }; + syncDataList.set(i, array); + } + } + } else { + for (int i = 0; i < syncDataList.size(); i++) { + + Object[] array = syncDataList.get(i);// Arrey 1 + String[] columnsArray = null; + if(null != serverColumns) + columnsArray = serverColumns.split(","); // arrey 2 + + List Newarray = new ArrayList<>(); + + int arrayIndex = 0; + int columnsArrayIndex = 0; + //Newarray.add(schema); + //Newarray.add(tableName); + //while (columnsArrayIndex < columnsArray.length || arrayIndex < array.length) { + if (null != columnsArray && columnsArrayIndex < columnsArray.length) { + Newarray.add(columnsArray[columnsArrayIndex]); + columnsArrayIndex++; + } + + /* + * if (arrayIndex < array.length) { Newarray.add(array); arrayIndex++; } + */ + //} + + // Convert Newarray back to an array + //Object[] resultArray = Newarray.toArray(new Object[0]); + syncDataList.set(i, array); + + } + } + // start batch insert/update + int[] i = jdbcTemplate.batchUpdate(query, syncDataList); + return i; + + } + + // End of Data Upload Repository + + public List> getMasterDataFromTable(String schema, String table, String columnNames, + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { + jdbcTemplate = getJdbcTemplate(); + List> resultSetList =new ArrayList<>(); + String baseQuery = ""; + if (masterType != null) { + if (lastDownloadDate != null) { + if (masterType.equalsIgnoreCase("A")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE LastModDate >= ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate); + + } + else if (masterType.equalsIgnoreCase("V")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE LastModDate >= ? AND VanID = ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,vanID); + } + else if (masterType.equalsIgnoreCase("P")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE LastModDate >= ? AND ProviderServiceMapID = ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,psmID); + } + } else { + if (masterType.equalsIgnoreCase("A")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table; + resultSetList = jdbcTemplate.queryForList(baseQuery); + } + else if (masterType.equalsIgnoreCase("V")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + " WHERE VanID = ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,vanID); + } + else if (masterType.equalsIgnoreCase("P")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE ProviderServiceMapID = ? 
"; + resultSetList = jdbcTemplate.queryForList(baseQuery,psmID); + } + } + } + logger.info("Select query central: " + baseQuery); + logger.info("Last Downloaded Date " + lastDownloadDate); + logger.info("Result set Details: " + resultSetList); + return resultSetList; + } + + // End of Data Download Repository +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java index 6142d257..b1ed9103 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java @@ -39,6 +39,10 @@ public class GetMasterDataFromCentralForVanImpl implements GetMasterDataFromCentralForVan { @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; + + @Autowired + private DataSyncRepositoryCentralDownload dataSyncRepositoryCentralDownload; + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { @@ -62,7 +66,7 @@ public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { private List> getMasterDataFromGivenTable(SyncDownloadMaster tableDetails) throws Exception { logger.info("ger master data="+ tableDetails.getSchemaName()); List> resultSetList = new ArrayList<>(); - resultSetList = dataSyncRepositoryCentral.getMasterDataFromTable(tableDetails.getSchemaName(), + resultSetList = dataSyncRepositoryCentralDownload.getMasterDataFromTable(tableDetails.getSchemaName(), tableDetails.getTableName(), tableDetails.getServerColumnName(), tableDetails.getMasterType(), tableDetails.getLastDownloadDate(), tableDetails.getVanID(), tableDetails.getProviderServiceMapID()); return resultSetList; From 3d3ce58c1ca2eb8cdad0b52011d88f6537ae9179 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 6 Aug 2025 13:38:56 +0530 Subject: [PATCH 14/23] fix: remove the validation (#105) --- .../GetDataFromVanAndSyncToDBImpl.java | 96 +++++++++---------- 1 file changed, 48 insertions(+), 48 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 2d88e6f6..5f67b89f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -98,10 +98,10 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t String syncTableName = syncUploadDataDigester.getTableName(); String schemaName = syncUploadDataDigester.getSchemaName(); - if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); - return "Error: Invalid schema or table name."; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + // logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); + // return "Error: Invalid schema or table name."; + // } // Handle specific tables first, if their logic is distinct @@ -192,10 +192,10 @@ private boolean syncTablesInGroup(String 
schemaName, String currentTableName, Sy logger.info("Attempting generic sync for table: {}", currentTableName); // Validate schemaName and currentTableName for safety before proceeding - if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { - logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); - return false; // Fail fast if identifiers are invalid - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { + // logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); + // return false; // Fail fast if identifiers are invalid + // } SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); @@ -217,10 +217,10 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload String schemaName = syncUploadDataDigester.getSchemaName(); String tableName = syncUploadDataDigester.getTableName(); - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); - return "Error: Invalid schema or table name."; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); + // return "Error: Invalid schema or table name."; + // } List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -268,10 +268,10 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat String schemaName = syncUploadDataDigester.getSchemaName(); String tableName = syncUploadDataDigester.getTableName(); - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); - return "Error: Invalid schema or table name."; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); + // return "Error: Invalid schema or table name."; + // } List syncData = new ArrayList<>(); // This list will hold data for batch updates to 'Processed' @@ -353,10 +353,10 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat } private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); - throw new IllegalArgumentException("Invalid schema or table name provided."); - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); + // throw new IllegalArgumentException("Invalid schema or table name provided."); + // } return String.format("UPDATE %s.%s SET Processed = 'P', SyncedDate = now(), SyncedBy = ? WHERE BeneficiaryDetailsId = ? 
AND VanID = ?", schemaName, tableName); } @@ -370,15 +370,15 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); String serverColumns = syncUploadDataDigester.getServerColumns(); - if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); - return false; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + // logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); + // return false; + // } - if (!isValidColumnNames(serverColumns)) { - logger.error("Invalid server columns provided for generic sync: {}", serverColumns); - return false; - } + // if (!isValidColumnNames(serverColumns)) { + // logger.error("Invalid server columns provided for generic sync: {}", serverColumns); + // return false; + // } List> dataToBesync = syncUploadDataDigester.getSyncData(); @@ -538,14 +538,14 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - throw new IllegalArgumentException("Invalid schema or table name provided."); - } - if (!isValidColumnNames(serverColumns)) { - logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); - throw new IllegalArgumentException("Invalid column names provided."); - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + // throw new IllegalArgumentException("Invalid schema or table name provided."); + // } + // if (!isValidColumnNames(serverColumns)) { + // logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); + // throw new IllegalArgumentException("Invalid column names provided."); + // } String[] columnsArr = serverColumns.split(","); @@ -562,24 +562,24 @@ private String getQueryToInsertDataToServerDB(String schemaName, String tableNam } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - throw new IllegalArgumentException("Invalid schema or table name provided."); - } - if (!isValidColumnNames(serverColumns)) { - logger.error("Invalid server columns provided for getQueryToUpdateDataToServerDB: {}", serverColumns); - throw new IllegalArgumentException("Invalid column names provided."); - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + // throw new IllegalArgumentException("Invalid schema or table name provided."); + // } + // if (!isValidColumnNames(serverColumns)) { + // logger.error("Invalid server columns provided for 
getQueryToUpdateDataToServerDB: {}", serverColumns); + // throw new IllegalArgumentException("Invalid column names provided."); + // } String[] columnsArr = serverColumns.split(","); StringBuilder preparedStatementSetter = new StringBuilder(); for (int i = 0; i < columnsArr.length; i++) { String column = columnsArr[i].trim(); - if (!isValidColumnName(column)) { - logger.error("Invalid individual column name encountered: {}", column); - throw new IllegalArgumentException("Invalid individual column name provided: " + column); - } + // if (!isValidColumnName(column)) { + // logger.error("Invalid individual column name encountered: {}", column); + // throw new IllegalArgumentException("Invalid individual column name provided: " + column); + // } preparedStatementSetter.append(column); preparedStatementSetter.append(" = ?"); From 4c85e25b8da1ea3c1be485443dbe3292bfc42dd8 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 6 Aug 2025 19:14:03 +0530 Subject: [PATCH 15/23] fix: replace the old working code (#106) --- .../GetDataFromVanAndSyncToDBImpl.java | 379 +++++++----------- 1 file changed, 144 insertions(+), 235 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 5f67b89f..69be169f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -27,8 +27,6 @@ import java.util.List; import java.util.Map; import java.util.HashMap; -import java.util.Set; -import java.util.HashSet; import org.slf4j.Logger; @@ -50,22 +48,8 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB private DataSyncRepositoryCentral dataSyncRepositoryCentral; private static final Map> TABLE_GROUPS = new HashMap<>(); - private static final Set VALID_SCHEMAS = new HashSet<>(Arrays.asList("public", "db_iemr")); // Add your actual schema names - private static final Set VALID_TABLES = new HashSet<>(Arrays.asList( - "m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping", - "t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity", - "t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem", - "t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails", - "t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit", - "t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory", - 
"t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation", - "i_beneficiaryimage", - "t_stockadjustment","t_stocktransfer","t_patientreturn","t_indent","t_indentissue","t_indentorder","t_saitemmapping" - )); - static { - - TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); + TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); @@ -87,6 +71,7 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { logger.info("Starting syncDataToServer. Token: {}", token); + ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); @@ -96,13 +81,6 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t } String syncTableName = syncUploadDataDigester.getTableName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - // logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); - // return "Error: Invalid schema or table name."; - // } - // Handle specific tables first, if their logic is distinct if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { @@ -113,8 +91,7 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } - if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); if ("data sync passed".equals(result)) { return "Sync successful for i_beneficiarydetails."; @@ -151,20 +128,22 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t List tablesInGroup = entry.getValue(); logger.info("Starting sync for Group {}", groupId); for (String table : tablesInGroup) { - if (!isValidTableName(table)) { - logger.error("Invalid table name '{}' encountered in group {}. Skipping.", table, groupId); - syncSuccess = false; - errorMessage += "Invalid table name: " + table + " in Group " + groupId + ". "; - continue; // Skip this table - } - try { - + // Create a new digester for each table within the group, + // or adapt if the original digester contains data for multiple tables. + // For simplicity, assuming syncDataDigester needs to be tailored per table or group. 
+ // If your requestOBJ contains data for only one table at a time, this loop might need adjustment + // to fetch data for each table in the group. + // For now, it will use the syncData from the original requestOBJ, which implies + // the original requestOBJ should represent data for a single table. + // A more robust solution would involve fetching data for each table dynamically. boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); if (!currentTableSyncResult) { syncSuccess = false; errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); + // Optionally, you can choose to break here or continue to sync other tables in the group/next group + // For now, let's continue to attempt other tables within the group. } else { logger.info("Successfully synced table: {} in Group {}", table, groupId); } @@ -172,31 +151,34 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t syncSuccess = false; errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + e.getMessage() + ". "; logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); + // Continue to attempt other tables } } } } if (syncSuccess) { - logger.info("Overall data sync passed."); return "Overall data sync passed."; } else { - logger.info("Overall data sync failed. Details: " + errorMessage); return "Overall data sync failed. Details: " + errorMessage; } } } - + /** + * Helper method to sync tables belonging to a specific group. + * This method assumes that the `syncUploadDataDigester` will be populated + * with relevant data for the `currentTableName` before calling this. + * In a real-world scenario, you might fetch data for each table here. + */ private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { logger.info("Attempting generic sync for table: {}", currentTableName); - - // Validate schemaName and currentTableName for safety before proceeding - // if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { - // logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); - // return false; // Fail fast if identifiers are invalid - // } + // This is a simplification. In a production system, you would likely need + // to retrieve the actual data for 'currentTableName' from the local DB + // based on the group sync approach. For this example, we'll assume the + // originalDigester's syncData is relevant or needs to be re-populated. + // Create a new digester instance or modify the existing one for the current table SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); @@ -205,7 +187,12 @@ private boolean syncTablesInGroup(String schemaName, String currentTableName, Sy tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is generic or set per table - tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching + // !!! 
IMPORTANT: You'll need to fetch the data for 'currentTableName' from your local DB here. + // The `originalDigester.getSyncData()` might not be correct for all tables in a group. + // For demonstration, I'm just using the original digester's data, which is likely incorrect + // if you're syncing multiple tables from a single request. + // You'll need a method like: dataSyncRepositoryLocal.getDataForTable(currentTableName, ...) + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching return performGenericTableSync(tableSpecificDigester); } @@ -213,19 +200,11 @@ private boolean syncTablesInGroup(String schemaName, String currentTableName, Sy private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUploadDataDigester syncUploadDataDigester) { logger.info("Processing update_M_BeneficiaryRegIdMapping_for_provisioned_benID for table: {}", syncUploadDataDigester.getTableName()); - - String schemaName = syncUploadDataDigester.getSchemaName(); - String tableName = syncUploadDataDigester.getTableName(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); - // return "Error: Invalid schema or table name."; - // } - List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); - String query = String.format("UPDATE %s.%s SET Provisioned = true, SyncedDate = now(), SyncedBy = ? WHERE BenRegId = ? AND BeneficiaryID = ? AND VanID = ?", schemaName, tableName); + String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); for (Map map : dataToBesync) { if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { @@ -242,8 +221,8 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload if (!syncData.isEmpty()) { try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, - tableName, SERVER_COLUMNS_NOT_REQUIRED, query, syncData); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName(), SERVER_COLUMNS_NOT_REQUIRED, query, syncData); if (i.length == syncData.size()) { logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); @@ -262,125 +241,76 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload } } - + private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); + queryBuilder.append(" WHERE "); + queryBuilder.append(" BenRegId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" BeneficiaryID = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" VanID = ? 
"); + return queryBuilder.toString(); + } + public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { logger.info("Processing update_I_BeneficiaryDetails_for_processed_in_batches for table: {}", syncUploadDataDigester.getTableName()); - String schemaName = syncUploadDataDigester.getSchemaName(); - String tableName = syncUploadDataDigester.getTableName(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); - // return "Error: Invalid schema or table name."; - // } - - List syncData = new ArrayList<>(); // This list will hold data for batch updates to 'Processed' - - String updateQuery = getQueryFor_I_BeneficiaryDetails(schemaName, tableName); - - int limit = 1000; - int offset = 0; - int totalProcessed = 0; - - String whereClauseForBatchFetch = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // This is for fetching, not for update - - while (true) { - List> batchToFetch; - try { - batchToFetch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester, - whereClauseForBatchFetch, - limit, - offset); - } catch (Exception e) { - logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); - } - - if (batchToFetch.isEmpty()) { - break; - } - - for (Map map : batchToFetch) { - if (map.get("BeneficiaryDetailsId") != null && map.get("VanID") != null) { - Object[] params = new Object[3]; - params[0] = syncUploadDataDigester.getSyncedBy(); - params[1] = String.valueOf(map.get("BeneficiaryDetailsId")); - params[2] = String.valueOf(map.get("VanID")); - syncData.add(params); - } else { - logger.warn("Skipping record in i_beneficiarydetails due to missing BeneficiaryDetailsId or VanID: {}", map); - } - } - - if (!syncData.isEmpty()) { - try { - int[] batchUpdateResults = dataSyncRepositoryCentral.syncDataToCentralDB( - schemaName, - tableName, - SERVER_COLUMNS_NOT_REQUIRED, - updateQuery, - syncData); - - int successfulUpdates = 0; - for (int result : batchUpdateResults) { - if (result >= 1) { - successfulUpdates++; - } - } - totalProcessed += successfulUpdates; - logger.info("Batch update for i_beneficiarydetails: {} records processed, {} successfully updated.", syncData.size(), successfulUpdates); - - syncData.clear(); - offset += limit; - - } catch (Exception e) { - logger.error("Exception during batch update for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error during sync for i_beneficiarydetails: " + e.getMessage(); - } - } else { - logger.info("No valid records in the current batch for i_beneficiarydetails to update."); - offset += limit; - } + List syncData = new ArrayList<>(); + + String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); + + int limit = 1000; + int offset = 0; + int totalProcessed = 0; + + String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly + + while (true) { + List> batch; + try { + // *** ADD THIS LINE *** + logger.info("DEBUG: Passing whereClause to getBatchForBenDetails: [{}]", problematicWhereClause); + + batch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + problematicWhereClause, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for 
i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); } - - if (totalProcessed > 0) { + + if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a "success" logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); return "data sync passed"; } else { - logger.info("No records were processed for i_beneficiarydetails."); - return "No data processed for i_beneficiarydetails."; + logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); + return "No data processed or sync failed for i_beneficiarydetails."; } } - + } private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); - // throw new IllegalArgumentException("Invalid schema or table name provided."); - // } - return String.format("UPDATE %s.%s SET Processed = 'P', SyncedDate = now(), SyncedBy = ? WHERE BeneficiaryDetailsId = ? AND VanID = ?", schemaName, tableName); + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Processed = 'P', SyncedDate = now(), SyncedBy = ? "); + queryBuilder.append(" WHERE "); + queryBuilder.append("BeneficiaryDetailsId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append("VanID = ? "); + return queryBuilder.toString(); } - + /** + * Handles the generic synchronization logic for tables not covered by specific handlers. 
+ */ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { logger.info("Performing generic sync for table: {}", syncUploadDataDigester.getTableName()); - - String schemaName = syncUploadDataDigester.getSchemaName(); - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String serverColumns = syncUploadDataDigester.getServerColumns(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - // logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); - // return false; - // } - - // if (!isValidColumnNames(serverColumns)) { - // logger.error("Invalid server columns provided for generic sync: {}", serverColumns); - // return false; - // } - - List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncDataListInsert = new ArrayList<>(); List syncDataListUpdate = new ArrayList<>(); @@ -390,6 +320,9 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig return true; // Nothing to sync, consider it a success } + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); for (Map map : dataToBesync) { @@ -397,10 +330,13 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanID = String.valueOf(map.get("VanID")); int syncFacilityID = 0; + // Update SyncedBy and SyncedDate in the map itself before processing map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB + // Facility ID processing if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables switch (syncTableName.toLowerCase()) { case "t_indent": case "t_indentorder": { @@ -464,7 +400,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig // Prepare Object array for insert/update Object[] objArr; - List serverColumnsList = Arrays.asList(serverColumns.split(",")); + List serverColumnsList = Arrays.asList(syncUploadDataDigester.getServerColumns().split(",")); List currentRecordValues = new ArrayList<>(); for (String column : serverColumnsList) { @@ -504,9 +440,9 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig boolean updateSuccess = true; if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns()); try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); if (i.length != syncDataListInsert.size()) { insertSuccess = false; logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", syncTableName, syncDataListInsert.size(), i.length, getFailedRecords(i, syncDataListInsert)); @@ -520,7 +456,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, syncUploadDataDigester.getServerColumns(), syncTableName); try { int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); if (j.length != syncDataListUpdate.size()) { @@ -538,59 +474,53 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - // throw new IllegalArgumentException("Invalid schema or table name provided."); - // } - // if (!isValidColumnNames(serverColumns)) { - // logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); - // throw new IllegalArgumentException("Invalid column names provided."); - // } + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); - - String[] columnsArr = serverColumns.split(","); StringBuilder preparedStatementSetter = new StringBuilder(); - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } } } - return String.format("INSERT INTO %s.%s(%s) VALUES (%s)", schemaName, tableName, serverColumns, preparedStatementSetter.toString()); + StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append("("); + queryBuilder.append(serverColumns); + queryBuilder.append(") VALUES ("); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(")"); + return queryBuilder.toString(); } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - // throw new IllegalArgumentException("Invalid schema or table name provided."); - // } - // if (!isValidColumnNames(serverColumns)) { - // logger.error("Invalid server columns provided for getQueryToUpdateDataToServerDB: {}", serverColumns); - // throw new IllegalArgumentException("Invalid column names provided."); - // } - - String[] columnsArr = serverColumns.split(","); + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); + StringBuilder preparedStatementSetter = new StringBuilder(); - for (int i = 0; i < columnsArr.length; i++) { - String column = columnsArr[i].trim(); - // if (!isValidColumnName(column)) { - // logger.error("Invalid individual column name encountered: {}", 
column); - // throw new IllegalArgumentException("Invalid individual column name provided: " + column); - // } - - preparedStatementSetter.append(column); - preparedStatementSetter.append(" = ?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append(columnsArr[i].trim()); + preparedStatementSetter.append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } } } - StringBuilder queryBuilder = new StringBuilder(); - queryBuilder.append(String.format("UPDATE %s.%s SET %s WHERE VanSerialNo = ?", schemaName, tableName, preparedStatementSetter.toString())); - + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(" WHERE VanSerialNo = ? "); if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", @@ -603,41 +533,20 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol return queryBuilder.toString(); } - private boolean isValidSchemaName(String schemaName) { - return VALID_SCHEMAS.contains(schemaName.toLowerCase()); - } - - private boolean isValidTableName(String tableName) { - return VALID_TABLES.contains(tableName.toLowerCase()); - } - - private boolean isValidColumnName(String columnName) { - return columnName != null && columnName.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); - } - - private boolean isValidColumnNames(String columnNames) { - if (columnNames == null || columnNames.trim().isEmpty()) { - return false; - } - String[] cols = columnNames.split(","); - for (String col : cols) { - if (!isValidColumnName(col.trim())) { - return false; - } - } - return true; - } - - + // Helper to get information about failed records (for logging purposes) private String getFailedRecords(int[] results, List data) { List failedRecordsInfo = new ArrayList<>(); for (int k = 0; k < results.length; k++) { - if (results[k] < 1) { - String idInfo = "N/A"; - if (data.get(k) != null && data.get(k).length > 0) { - idInfo = "Record data size: " + data.get(k).length; + // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or Statement.SUCCESS_NO_INFO + // usually indicates a failure or success without specific row count. + // A common return value for success is 1 (for one row updated/inserted). + if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure + // Attempt to get some identifiable info from the failed record + if (data.get(k).length > 0) { + failedRecordsInfo.add("Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); + } else { + failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); } - failedRecordsInfo.add("Record at index " + k + " (Info: " + idInfo + ")"); } } return String.join("; ", failedRecordsInfo); From 9c97e6d9e27545f330ea94858bb4e6e8738f4610 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 8 Aug 2025 09:13:31 +0530 Subject: [PATCH 16/23] Fix the datasync upload issue (#107) * fix: add the schemas * fix: remove logger * fix: revert the old code for repository --- .../dataSyncActivity/DataSyncRepository.java | 7 +- .../UploadDataToServerImpl.java | 1 + .../DataSyncRepositoryCentral.java | 234 +++++++++--------- .../GetDataFromVanAndSyncToDBImpl.java | 24 +- 4 files changed, 134 insertions(+), 132 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index 52fcf94a..b039a47f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -64,7 +64,7 @@ public List> getDataForGivenSchemaAndTable(String schema, St if (table != null && table.equalsIgnoreCase("m_beneficiaryregidmapping")) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE provisioned is true AND processed != 'P' AND vanID is not null "; + + " WHERE provisioned is true AND processed <> 'P' AND vanID is not null "; } else { if (table != null && (table.equalsIgnoreCase("t_patientissue") || table.equalsIgnoreCase("t_physicalstockentry") || table.equalsIgnoreCase("t_stockadjustment") @@ -75,15 +75,14 @@ public List> getDataForGivenSchemaAndTable(String schema, St || table.equalsIgnoreCase("t_itemstockexit"))) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE processed != 'P' AND SyncFacilityID is not null "; + + " WHERE processed <> 'P' AND SyncFacilityID is not null "; } else { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." 
+ table - + " WHERE processed != 'P' AND vanID is not null "; + + " WHERE processed <> 'P' AND vanID is not null "; } } - resultSetList = jdbcTemplate.queryForList(baseQuery); return resultSetList; } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index b65fe6e5..9476ecc7 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -299,6 +299,7 @@ public List getVanAndServerColumnList(Integer groupID) throws private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { + logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index 175de980..bdba82d0 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -21,21 +21,19 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.sql.DataSource; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; -import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.sql.Statement; // Import Statement for batchUpdate result interpretation +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; @Service public class DataSyncRepositoryCentral { @@ -44,6 +42,7 @@ public class DataSyncRepositoryCentral { private JdbcTemplate jdbcTemplate; + // Lazily initialize jdbcTemplate to ensure DataSource is available private JdbcTemplate getJdbcTemplate() { if (this.jdbcTemplate == null) { this.jdbcTemplate = new JdbcTemplate(dataSource); @@ -51,170 +50,161 @@ private JdbcTemplate getJdbcTemplate() { return this.jdbcTemplate; } - private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - - private static final Set VALID_SCHEMAS = Set.of("public", "db_iemr"); - - private static final Set VALID_TABLES = Set.of( - "m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", "i_beneficiarycontacts", - "i_beneficiarydetails", "i_beneficiaryfamilymapping", "i_beneficiaryidentity", "i_beneficiarymapping", - "t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", "t_pnccare", - "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", "t_physicalactivity", - "t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", "t_sys_cardiovascular", - "t_sys_respiratory", "t_sys_centralnervous", "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem", - "t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", 
"t_benchefcomplaint", "t_benclinicalobservation", - "t_prescription", "t_prescribeddrug", "t_lab_testorder", "t_benreferdetails", - "t_lab_testresult", "t_physicalstockentry", "t_patientissue", "t_facilityconsumption", "t_itemstockentry", - "t_itemstockexit", "t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", - "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", - "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", - "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory", - "t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", "t_cancerobstetrichistory", - "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", "t_canceroralexamination", - "t_cancerbreastexamination", "t_cancerabdominalexamination", "t_cancergynecologicalexamination", - "t_cancerdiagnosis", "t_cancerimageannotation", "i_beneficiaryimage", "t_stockadjustment", - "t_stocktransfer", "t_patientreturn", "t_indent", "t_indentissue", "t_indentorder", "t_saitemmapping" - ); - - private boolean isValidDatabaseIdentifierCharacter(String identifier) { - return identifier != null && identifier.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); - } - - private boolean isValidSchemaName(String schemaName) { - return VALID_SCHEMAS.contains(schemaName.toLowerCase()); - } - - private boolean isValidTableName(String tableName) { - return VALID_TABLES.contains(tableName.toLowerCase()); - } - - private boolean isValidColumnNamesList(String columnNames) { - if (columnNames == null || columnNames.trim().isEmpty()) { - return false; - } - for (String col : columnNames.split(",")) { - if (!isValidDatabaseIdentifierCharacter(col.trim())) { - return false; - } - } - return true; - } + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + // Data Upload Repository public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, String vanAutoIncColumnName, int syncFacilityID) { jdbcTemplate = getJdbcTemplate(); - List params = new ArrayList<>(); - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName) || - !isValidDatabaseIdentifierCharacter(vanAutoIncColumnName)) { - logger.error("Invalid identifiers: schema={}, table={}, column={}", schemaName, tableName, vanAutoIncColumnName); - throw new IllegalArgumentException("Invalid identifiers provided."); - } + List params = new ArrayList<>(); - StringBuilder queryBuilder = new StringBuilder("SELECT ") - .append(vanAutoIncColumnName).append(" FROM ") - .append(schemaName).append(".").append(tableName).append(" WHERE VanSerialNo = ?"); + StringBuilder queryBuilder = new StringBuilder("SELECT "); + queryBuilder.append(vanAutoIncColumnName); + queryBuilder.append(" FROM "); + queryBuilder.append(schemaName).append(".").append(tableName); + StringBuilder whereClause = new StringBuilder(); + whereClause.append(" WHERE "); + whereClause.append("VanSerialNo = ?"); params.add(vanSerialNo); - if (List.of("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit").contains(tableName.toLowerCase()) && syncFacilityID > 0) { - queryBuilder.append(" AND SyncFacilityID = ?"); + 
"t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(tableName.toLowerCase()) && syncFacilityID > 0) { + + whereClause.append(" AND "); + whereClause.append("SyncFacilityID = ?"); params.add(syncFacilityID); + } else { - queryBuilder.append(" AND VanID = ?"); + whereClause.append(" AND "); + whereClause.append("VanID = ?"); params.add(vanID); } + queryBuilder.append(whereClause); + String query = queryBuilder.toString(); + Object[] queryParams = params.toArray(); + + logger.debug("Checking record existence query: {} with params: {}", query, Arrays.toString(queryParams)); + System.out.println("Checking record existence query: " + query + " with params: " + Arrays.toString(queryParams)); + try { - List> resultSet = jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); - return (resultSet != null && !resultSet.isEmpty()) ? 1 : 0; + List> resultSet = jdbcTemplate.queryForList(query, queryParams); + if (resultSet != null && !resultSet.isEmpty()) { + System.out.println("Record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.debug("Record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); + return 1; + } else { + System.out.println("No record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.debug("No record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); + return 0; + } + } catch (org.springframework.dao.EmptyResultDataAccessException e) { + System.out.println("No record found (EmptyResultDataAccessException) for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.debug("No record found (EmptyResultDataAccessException) for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); + return 0; } catch (Exception e) { - logger.error("Error checking record presence: {}", e.getMessage(), e); - throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); + System.out.println("Database error during checkRecordIsAlreadyPresentOrNot for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.error("Database error during checkRecordIsAlreadyPresentOrNot for table {}: VanSerialNo={}, VanID={}. Error: {}", tableName, vanSerialNo, vanID, e.getMessage(), e); + throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); // Re-throw or handle as appropriate } } + // Method for synchronization of data to central DB public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, List syncDataList) { jdbcTemplate = getJdbcTemplate(); + logger.info("Executing batch operation for table: {}. Query type: {}. Number of records: {}", tableName, query.startsWith("INSERT") ? "INSERT" : "UPDATE", syncDataList.size()); + logger.debug("Query: {}", query); +System.out.println("Executing batch operation for table: " + tableName + ". Query type: " + (query.startsWith("INSERT") ? "INSERT" : "UPDATE") + ". Number of records: " + syncDataList.size()); try { - return jdbcTemplate.batchUpdate(query, syncDataList); + // Start batch insert/update + int[] i = jdbcTemplate.batchUpdate(query, syncDataList); + System.out.println("Batch operation completed for table " + tableName + ". Results: " + Arrays.toString(i)); + logger.info("Batch operation completed for table {}. 
Results: {}", tableName, Arrays.toString(i)); + return i; } catch (Exception e) { - logger.error("Batch sync failed for table {}: {}", tableName, e.getMessage(), e); - throw new RuntimeException("Batch sync failed: " + e.getMessage(), e); + logger.error("Exception during batch update for table {}: {}", tableName, e.getMessage(), e); + System.out.println("Exception during batch update for table " + tableName + ": " + e.getMessage()); + // Log the error with detailed information + // Re-throw the exception to be handled by the service layer, so specific errors can be captured. + throw new RuntimeException("Batch sync failed for table " + tableName + ": " + e.getMessage(), e); } } + // End of Data Upload Repository + public List> getMasterDataFromTable(String schema, String table, String columnNames, - String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) { + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { jdbcTemplate = getJdbcTemplate(); + List> resultSetList = new ArrayList<>(); + StringBuilder baseQueryBuilder = new StringBuilder(" SELECT ").append(columnNames).append(" FROM ").append(schema).append(".").append(table); List params = new ArrayList<>(); - if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { - throw new IllegalArgumentException("Invalid schema, table, or column names."); - } - - StringBuilder queryBuilder = new StringBuilder("SELECT ").append(columnNames) - .append(" FROM ").append(schema).append(".").append(table); - if (masterType != null) { if (lastDownloadDate != null) { - queryBuilder.append(" WHERE LastModDate >= ?"); + baseQueryBuilder.append(" WHERE LastModDate >= ? "); params.add(lastDownloadDate); - if ("V".equalsIgnoreCase(masterType)) { - queryBuilder.append(" AND VanID = ?"); + if (masterType.equalsIgnoreCase("V")) { + baseQueryBuilder.append(" AND VanID = ? "); params.add(vanID); - } else if ("P".equalsIgnoreCase(masterType)) { - queryBuilder.append(" AND ProviderServiceMapID = ?"); + } else if (masterType.equalsIgnoreCase("P")) { + baseQueryBuilder.append(" AND ProviderServiceMapID = ? "); params.add(psmID); } } else { - queryBuilder.append(" WHERE "); - if ("V".equalsIgnoreCase(masterType)) { - queryBuilder.append("VanID = ?"); + if (masterType.equalsIgnoreCase("V")) { + baseQueryBuilder.append(" WHERE VanID = ? "); params.add(vanID); - } else if ("P".equalsIgnoreCase(masterType)) { - queryBuilder.append("ProviderServiceMapID = ?"); + } else if (masterType.equalsIgnoreCase("P")) { + baseQueryBuilder.append(" WHERE ProviderServiceMapID = ? "); params.add(psmID); } } } + String finalQuery = baseQueryBuilder.toString(); + logger.info("Select query central: {}", finalQuery); + logger.info("Last Downloaded Date: {}", lastDownloadDate); + logger.info("Query Params: {}", params); + System.out.println("Select query central: " + finalQuery); + System.out.println("Last Downloaded Date: " + lastDownloadDate); + System.out.println("Query Params: " + params); + try { - // Safe dynamic SQL: All dynamic parts (table names, columns, etc.) are validated or hardcoded. - // Parameter values are bound safely using prepared statement placeholders (?). 
- return jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + if (params.isEmpty()) { + resultSetList = jdbcTemplate.queryForList(finalQuery); + } else { + resultSetList = jdbcTemplate.queryForList(finalQuery, params.toArray()); + } } catch (Exception e) { - logger.error("Error fetching master data: {}", e.getMessage(), e); + System.out.println("Error fetching master data from table " + table + ": " + e.getMessage()); + logger.error("Error fetching master data from table {}: {}", table, e.getMessage(), e); throw new RuntimeException("Failed to fetch master data: " + e.getMessage(), e); } +System.out.println("Result set Details size: " + resultSetList.size()); + logger.info("Result set Details size: {}", resultSetList.size()); + return resultSetList; } - public List> getBatchForBenDetails(SyncUploadDataDigester digester, + public List> getBatchForBenDetails(String schema, String table, String columnNames, String whereClause, int limit, int offset) { - jdbcTemplate = getJdbcTemplate(); - -String schema = digester.getSchemaName(); - String table = digester.getTableName(); - String columnNames = digester.getServerColumns(); - - - if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { - throw new IllegalArgumentException("Invalid schema, table, or column names."); - } - // Safe dynamic SQL: Schema, table, and column names are validated against predefined whitelists. - // Only trusted values are used in the query string. - // limit and offset are passed as parameters to prevent SQL injection. - String query = String.format("SELECT %s FROM %s.%s %s LIMIT ? OFFSET ?", columnNames, schema, table, whereClause); //NOSONAR - - try { - return jdbcTemplate.queryForList(query, limit, offset); - } catch (Exception e) { - logger.error("Error fetching batch details: {}", e.getMessage(), e); - throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); - } + jdbcTemplate = getJdbcTemplate(); + String query = "SELECT " + columnNames + " FROM " + schema + "." + table + whereClause + " LIMIT ? OFFSET ?"; + System.out.println("Fetching batch for beneficiary details. Query: " + query + ", Limit: " + limit + ", Offset: " + offset); + logger.debug("Fetching batch for beneficiary details. 
Query: {}, Limit: {}, Offset: {}", query, limit, offset); + try { + return jdbcTemplate.queryForList(query, limit, offset); + } catch (Exception e) { + logger.error("Error fetching batch for beneficiary details from table {}: {}", table, e.getMessage(), e); + System.out.println("Error fetching batch for beneficiary details from table " + table + ": " + e.getMessage()); + throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); } -} + } + + // End of Data Download Repository +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 69be169f..4a97725b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -49,6 +49,17 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB private static final Map> TABLE_GROUPS = new HashMap<>(); static { + // Group 1: Master data or less frequently changing data + TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "m_another_master_table")); + + // Group 2: Transactional data that might involve facility ID + TABLE_GROUPS.put(2, Arrays.asList("t_indent", "t_indentorder", "t_indentissue", "t_stocktransfer", "t_itemstockentry")); + + // Group 3: High volume transactional data + TABLE_GROUPS.put(3, Arrays.asList("i_beneficiarydetails", "t_patientissue", "t_physicalstockentry", + "t_stockadjustment", "t_saitemmapping", "t_patientreturn", + "t_facilityconsumption", "t_itemstockexit")); + // Add more groups as needed, up to 9 TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); @@ -67,11 +78,10 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB TABLE_GROUPS.put(9, Arrays.asList("t_itemstockentry","t_itemstockexit","t_patientissue","t_physicalstockentry","t_stockadjustment","t_stocktransfer","t_patientreturn","t_facilityconsumption","t_indent","t_indentissue","t_indentorder","t_saitemmapping")); - } + } public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { logger.info("Starting syncDataToServer. 
Token: {}", token); - ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); @@ -91,7 +101,8 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + } + if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); if ("data sync passed".equals(result)) { return "Sync successful for i_beneficiarydetails."; @@ -271,12 +282,13 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat while (true) { List> batch; try { - // *** ADD THIS LINE *** logger.info("DEBUG: Passing whereClause to getBatchForBenDetails: [{}]", problematicWhereClause); batch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester, - problematicWhereClause, + syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName(), + syncUploadDataDigester.getServerColumns(), + problematicWhereClause, // Use the variable limit, offset); } catch (Exception e) { From 43df9cf6563abc6ed1c5702e9a062e5386480a28 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Sun, 10 Aug 2025 19:05:10 +0530 Subject: [PATCH 17/23] Fixing the datasync from local to central (#110) * fix: datasync from local to central * fix: fix the token --- .../MMUDataSyncVanToServer.java | 6 +- .../dataSyncActivity/DataSyncRepository.java | 33 ++- .../UploadDataToServerImpl.java | 53 +--- .../DataSyncRepositoryCentral.java | 238 ++++++++------- .../GetDataFromVanAndSyncToDBImpl.java | 280 +++++++++++------- .../mmu/utils/JwtUserIdValidationFilter.java | 15 +- .../com/iemr/mmu/utils/RestTemplateUtil.java | 16 +- 7 files changed, 337 insertions(+), 304 deletions(-) diff --git a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java index eeb54e9f..524094e5 100644 --- a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java +++ b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java @@ -63,12 +63,8 @@ public String dataSyncToServer(@RequestBody String requestOBJ, @RequestHeader(value = "Authorization") String Authorization, HttpServletRequest request) { OutputResponse response = new OutputResponse(); - logger.info("test: vanto server auth="+Authorization); try { - String jwtToken = CookieUtil.getJwtTokenFromCookie(request); - logger.info("test: vanto server token="+jwtToken); - - String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization, jwtToken); + String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization); if (s != null) response.setResponse(s); else diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index b039a47f..9b020a4e 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -28,7 +28,10 @@ import javax.sql.DataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.annotation.CreatedDate; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; @@ -50,6 +53,8 @@ public class DataSyncRepository { @Autowired private SyncUtilityClassRepo syncutilityClassRepo; + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + private JdbcTemplate getJdbcTemplate() { return new JdbcTemplate(dataSource); @@ -64,7 +69,7 @@ public List> getDataForGivenSchemaAndTable(String schema, St if (table != null && table.equalsIgnoreCase("m_beneficiaryregidmapping")) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE provisioned is true AND processed <> 'P' AND vanID is not null "; + + " WHERE provisioned is true AND processed != 'P' AND vanID is not null "; } else { if (table != null && (table.equalsIgnoreCase("t_patientissue") || table.equalsIgnoreCase("t_physicalstockentry") || table.equalsIgnoreCase("t_stockadjustment") @@ -75,30 +80,36 @@ public List> getDataForGivenSchemaAndTable(String schema, St || table.equalsIgnoreCase("t_itemstockexit"))) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE processed <> 'P' AND SyncFacilityID is not null "; + + " WHERE processed != 'P' AND SyncFacilityID is not null "; } else { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE processed <> 'P' AND vanID is not null "; + + " WHERE processed != 'P' AND vanID is not null "; } } + resultSetList = jdbcTemplate.queryForList(baseQuery); return resultSetList; } - - public int updateProcessedFlagInVan(String schemaName, String tableName, StringBuilder vanSerialNos, String autoIncreamentColumn, String user) throws Exception { jdbcTemplate = getJdbcTemplate(); - String query = " UPDATE " + schemaName + "." + tableName - + " SET processed = 'P' , SyncedDate = ?, Syncedby = ? WHERE " + autoIncreamentColumn - + " IN (" + vanSerialNos + ")"; + String query = ""; - Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); - int updatedRows = jdbcTemplate.update(query, syncedDate, user); + if (tableName != null && tableName.toLowerCase().equals("i_ben_flow_outreach")) { + query = "UPDATE " + schemaName + "." + tableName + + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + } else { + query = "UPDATE " + schemaName + "." + tableName + + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? 
" + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + } + Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); + int updatedRows = jdbcTemplate.update(query, syncedDate, syncedDate, user); return updatedRows; } @@ -119,4 +130,4 @@ public int[] updateLatestMasterInLocal(String query, List syncDataList // ---------------------------------- End of Download Repository -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 9476ecc7..2c24dc2c 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -243,32 +243,8 @@ private void setResponseStatus(Map groupIdStatus, int groupId, S groupIdStatus.put("groupId", String.valueOf(groupId)); groupIdStatus.put("status", serverAcknowledgement); responseStatus.add(groupIdStatus); - logger.info("Response from data sync: {}", responseStatus); } -// private boolean setResponseStatus(Map groupIdStatus, int groupId, String serverAcknowledgement, -// List> responseStatus, boolean isProgress) { -// if (serverAcknowledgement != null) { -// groupIdStatus.put("groupId", String.valueOf(groupId)); -// groupIdStatus.put("status", serverAcknowledgement); -// responseStatus.add(groupIdStatus); -// logger.info("Response from data sync", responseStatus); -// } else if (isProgress) { -// groupIdStatus.put("groupId", String.valueOf(groupId)); -// groupIdStatus.put("status", "pending"); -// responseStatus.add(groupIdStatus); -// logger.info("Response from data sync", responseStatus); -// } else { -// isProgress = true; -// groupIdStatus.put("groupId", String.valueOf(groupId)); -// groupIdStatus.put("status", "failed"); -// responseStatus.add(groupIdStatus); -// logger.info("Response from data sync", responseStatus); -// } -// return isProgress; -// -// } - /** * * @param syncTableDetailsIDs @@ -299,7 +275,6 @@ public List getVanAndServerColumnList(Integer groupID) throws private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { - logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { @@ -343,14 +318,11 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, String token) throws Exception { - logger.debug( - "Entering syncDataToServer with vanID: {}, schemaName: '{}', tableName: '{}', vanAutoIncColumnName: '{}', serverColumns: '{}', user: '{}'", - vanID, schemaName, tableName, vanAutoIncColumnName, serverColumns, user); + RestTemplate restTemplate = new RestTemplate(); Integer facilityID = masterVanRepo.getFacilityID(vanID); - logger.debug("Fetched facilityID for vanID {}: {}", vanID, facilityID); // serialize null GsonBuilder gsonBuilder = new GsonBuilder(); @@ -368,33 +340,18 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); - - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization,token); - logger.info("Before Data sync upload Url" + dataSyncUploadUrl); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, 
Authorization,"datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); - logger.info("Received response from data sync URL: {}", response); - logger.info("Received response from data sync URL: {}", dataSyncUploadUrl); - - logger.info("After Data sync upload Url" + dataSyncUploadUrl); - /** - * if data successfully synced then getVanSerialNo of synced data to update - * processed flag - */ + int i = 0; if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("statusCode") && obj.getInt("statusCode") == 200) { StringBuilder vanSerialNos = getVanSerialNoListForSyncedData(vanAutoIncColumnName, dataToBesync); - logger.info( - "Updating processed flag for schemaName: {}, tableName: {}, vanSerialNos: {}, vanAutoIncColumnName: {}, user: {}", - schemaName, tableName, vanSerialNos.toString(), vanAutoIncColumnName, user); - // update table for processed flag = "P" - logger.info(schemaName + "|" + tableName + "|" + vanSerialNos.toString() + "|" + vanAutoIncColumnName - + "|" + user); + i = dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, vanSerialNos, vanAutoIncColumnName, user); - logger.debug("Updated processed flag in database. Records affected: {}", i); } } if (i > 0) @@ -435,4 +392,4 @@ public String getDataSyncGroupDetails() { return null; } -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index bdba82d0..a5d8422d 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -21,19 +21,21 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import javax.sql.DataSource; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; -import javax.sql.DataSource; -import java.sql.Timestamp; -import java.sql.Statement; // Import Statement for batchUpdate result interpretation -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; +import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; @Service public class DataSyncRepositoryCentral { @@ -42,7 +44,6 @@ public class DataSyncRepositoryCentral { private JdbcTemplate jdbcTemplate; - // Lazily initialize jdbcTemplate to ensure DataSource is available private JdbcTemplate getJdbcTemplate() { if (this.jdbcTemplate == null) { this.jdbcTemplate = new JdbcTemplate(dataSource); @@ -50,161 +51,176 @@ private JdbcTemplate getJdbcTemplate() { return this.jdbcTemplate; } - private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + + private static final Set VALID_SCHEMAS = Set.of("public", "db_iemr", "db_identity","apl_db_iemr","apl_db_identity","db_iemr_sync","db_identity_sync"); + + private static final Set VALID_TABLES = Set.of( + "m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", "i_beneficiarycontacts", + 
"i_beneficiarydetails", "i_beneficiaryfamilymapping", "i_beneficiaryidentity", "i_beneficiarymapping", + "t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", "t_pnccare", + "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", "t_physicalactivity", + "t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", "t_sys_cardiovascular", + "t_sys_respiratory", "t_sys_centralnervous", "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem", + "t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", "t_benclinicalobservation", + "t_prescription", "t_prescribeddrug", "t_lab_testorder", "t_benreferdetails", + "t_lab_testresult", "t_physicalstockentry", "t_patientissue", "t_facilityconsumption", "t_itemstockentry", + "t_itemstockexit", "t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory", + "t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", "t_cancerobstetrichistory", + "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", "t_canceroralexamination", + "t_cancerbreastexamination", "t_cancerabdominalexamination", "t_cancergynecologicalexamination", + "t_cancerdiagnosis", "t_cancerimageannotation", "i_beneficiaryimage", "t_stockadjustment", + "t_stocktransfer", "t_patientreturn", "t_indent", "t_indentissue", "t_indentorder", "t_saitemmapping"); + + private boolean isValidDatabaseIdentifierCharacter(String identifier) { + return identifier != null && identifier.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); + } + + private boolean isValidSchemaName(String schemaName) { + return VALID_SCHEMAS.contains(schemaName.toLowerCase()); + } + + private boolean isValidTableName(String tableName) { + return VALID_TABLES.contains(tableName.toLowerCase()); + } + + private boolean isValidColumnNamesList(String columnNames) { + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + for (String col : columnNames.split(",")) { + if (!isValidDatabaseIdentifierCharacter(col.trim())) { + return false; + } + } + return true; + } - // Data Upload Repository public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, - String vanAutoIncColumnName, int syncFacilityID) { + String vanAutoIncColumnName, int syncFacilityID) { jdbcTemplate = getJdbcTemplate(); - List params = new ArrayList<>(); - StringBuilder queryBuilder = new StringBuilder("SELECT "); - queryBuilder.append(vanAutoIncColumnName); - queryBuilder.append(" FROM "); - queryBuilder.append(schemaName).append(".").append(tableName); + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName) || + !isValidDatabaseIdentifierCharacter(vanAutoIncColumnName)) { + logger.error("Invalid identifiers: schema={}, table={}, column={}", schemaName, tableName, + vanAutoIncColumnName); + throw new IllegalArgumentException("Invalid identifiers provided."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ") + .append(vanAutoIncColumnName).append(" FROM ") + .append(schemaName).append(".").append(tableName).append(" WHERE VanSerialNo = ?"); - StringBuilder whereClause = new StringBuilder(); - whereClause.append(" WHERE "); - 
whereClause.append("VanSerialNo = ?"); params.add(vanSerialNo); - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + if (List.of("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") .contains(tableName.toLowerCase()) && syncFacilityID > 0) { - - whereClause.append(" AND "); - whereClause.append("SyncFacilityID = ?"); + queryBuilder.append(" AND SyncFacilityID = ?"); params.add(syncFacilityID); - } else { - whereClause.append(" AND "); - whereClause.append("VanID = ?"); + queryBuilder.append(" AND VanID = ?"); params.add(vanID); } - queryBuilder.append(whereClause); - String query = queryBuilder.toString(); - Object[] queryParams = params.toArray(); - - logger.debug("Checking record existence query: {} with params: {}", query, Arrays.toString(queryParams)); - System.out.println("Checking record existence query: " + query + " with params: " + Arrays.toString(queryParams)); - try { - List> resultSet = jdbcTemplate.queryForList(query, queryParams); - if (resultSet != null && !resultSet.isEmpty()) { - System.out.println("Record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.debug("Record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); - return 1; - } else { - System.out.println("No record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.debug("No record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); - return 0; - } - } catch (org.springframework.dao.EmptyResultDataAccessException e) { - System.out.println("No record found (EmptyResultDataAccessException) for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.debug("No record found (EmptyResultDataAccessException) for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); - return 0; + List> resultSet = jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + return (resultSet != null && !resultSet.isEmpty()) ? 1 : 0; } catch (Exception e) { - System.out.println("Database error during checkRecordIsAlreadyPresentOrNot for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.error("Database error during checkRecordIsAlreadyPresentOrNot for table {}: VanSerialNo={}, VanID={}. Error: {}", tableName, vanSerialNo, vanID, e.getMessage(), e); - throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); // Re-throw or handle as appropriate + logger.error("Error checking record presence: {}", e.getMessage(), e); + throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); } } - // Method for synchronization of data to central DB public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, - List syncDataList) { + List syncDataList) { jdbcTemplate = getJdbcTemplate(); - logger.info("Executing batch operation for table: {}. Query type: {}. Number of records: {}", tableName, query.startsWith("INSERT") ? "INSERT" : "UPDATE", syncDataList.size()); - logger.debug("Query: {}", query); -System.out.println("Executing batch operation for table: " + tableName + ". Query type: " + (query.startsWith("INSERT") ? "INSERT" : "UPDATE") + ". 
Number of records: " + syncDataList.size()); try { - // Start batch insert/update - int[] i = jdbcTemplate.batchUpdate(query, syncDataList); - System.out.println("Batch operation completed for table " + tableName + ". Results: " + Arrays.toString(i)); - logger.info("Batch operation completed for table {}. Results: {}", tableName, Arrays.toString(i)); - return i; + + return jdbcTemplate.batchUpdate(query, syncDataList); } catch (Exception e) { - logger.error("Exception during batch update for table {}: {}", tableName, e.getMessage(), e); - System.out.println("Exception during batch update for table " + tableName + ": " + e.getMessage()); - // Log the error with detailed information - // Re-throw the exception to be handled by the service layer, so specific errors can be captured. - throw new RuntimeException("Batch sync failed for table " + tableName + ": " + e.getMessage(), e); + logger.error("Batch sync failed for table {}: {}", tableName, e.getMessage(), e); + throw new RuntimeException("Batch sync failed: " + e.getMessage(), e); } } - // End of Data Upload Repository - public List> getMasterDataFromTable(String schema, String table, String columnNames, - String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) { jdbcTemplate = getJdbcTemplate(); - List> resultSetList = new ArrayList<>(); - StringBuilder baseQueryBuilder = new StringBuilder(" SELECT ").append(columnNames).append(" FROM ").append(schema).append(".").append(table); List params = new ArrayList<>(); + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ").append(columnNames) + .append(" FROM ").append(schema).append(".").append(table); + if (masterType != null) { if (lastDownloadDate != null) { - baseQueryBuilder.append(" WHERE LastModDate >= ? "); + queryBuilder.append(" WHERE LastModDate >= ?"); params.add(lastDownloadDate); - if (masterType.equalsIgnoreCase("V")) { - baseQueryBuilder.append(" AND VanID = ? "); + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND VanID = ?"); params.add(vanID); - } else if (masterType.equalsIgnoreCase("P")) { - baseQueryBuilder.append(" AND ProviderServiceMapID = ? "); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND ProviderServiceMapID = ?"); params.add(psmID); } } else { - if (masterType.equalsIgnoreCase("V")) { - baseQueryBuilder.append(" WHERE VanID = ? "); + queryBuilder.append(" WHERE "); + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append("VanID = ?"); params.add(vanID); - } else if (masterType.equalsIgnoreCase("P")) { - baseQueryBuilder.append(" WHERE ProviderServiceMapID = ? 
"); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append("ProviderServiceMapID = ?"); params.add(psmID); } } } - String finalQuery = baseQueryBuilder.toString(); - logger.info("Select query central: {}", finalQuery); - logger.info("Last Downloaded Date: {}", lastDownloadDate); - logger.info("Query Params: {}", params); - System.out.println("Select query central: " + finalQuery); - System.out.println("Last Downloaded Date: " + lastDownloadDate); - System.out.println("Query Params: " + params); - try { - if (params.isEmpty()) { - resultSetList = jdbcTemplate.queryForList(finalQuery); - } else { - resultSetList = jdbcTemplate.queryForList(finalQuery, params.toArray()); - } + // Safe dynamic SQL: All dynamic parts (table names, columns, etc.) are + // validated or hardcoded. + // Parameter values are bound safely using prepared statement placeholders (?). + + return jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); } catch (Exception e) { - System.out.println("Error fetching master data from table " + table + ": " + e.getMessage()); - logger.error("Error fetching master data from table {}: {}", table, e.getMessage(), e); + logger.error("Error fetching master data: {}", e.getMessage(), e); throw new RuntimeException("Failed to fetch master data: " + e.getMessage(), e); } -System.out.println("Result set Details size: " + resultSetList.size()); - logger.info("Result set Details size: {}", resultSetList.size()); - return resultSetList; } - public List> getBatchForBenDetails(String schema, String table, String columnNames, - String whereClause, int limit, int offset) { + public List> getBatchForBenDetails(SyncUploadDataDigester digester, + String whereClause, int limit, int offset) { jdbcTemplate = getJdbcTemplate(); - String query = "SELECT " + columnNames + " FROM " + schema + "." + table + whereClause + " LIMIT ? OFFSET ?"; - System.out.println("Fetching batch for beneficiary details. Query: " + query + ", Limit: " + limit + ", Offset: " + offset); - logger.debug("Fetching batch for beneficiary details. Query: {}, Limit: {}, Offset: {}", query, limit, offset); - try { - return jdbcTemplate.queryForList(query, limit, offset); - } catch (Exception e) { - logger.error("Error fetching batch for beneficiary details from table {}: {}", table, e.getMessage(), e); - System.out.println("Error fetching batch for beneficiary details from table " + table + ": " + e.getMessage()); - throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); - } - } - // End of Data Download Repository + String schema = digester.getSchemaName(); + String table = digester.getTableName(); + String columnNames = digester.getServerColumns(); + + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + // Safe dynamic SQL: Schema, table, and column names are validated against + // predefined whitelists. + // Only trusted values are used in the query string. + // limit and offset are passed as parameters to prevent SQL injection. + String query = String.format("SELECT %s FROM %s.%s %s LIMIT ? 
OFFSET ?", columnNames, schema, table, + whereClause); // NOSONAR + + try { + + return jdbcTemplate.queryForList(query, limit, offset); + } catch (Exception e) { + logger.error("Error fetching batch details: {}", e.getMessage(), e); + throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); + } + } } \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 4a97725b..cd33c2b5 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -24,10 +24,10 @@ import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.HashMap; - +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +37,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; - @Service public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { @@ -47,44 +46,57 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; + private static final Map> TABLE_GROUPS = new HashMap<>(); static { - // Group 1: Master data or less frequently changing data - TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "m_another_master_table")); - - // Group 2: Transactional data that might involve facility ID - TABLE_GROUPS.put(2, Arrays.asList("t_indent", "t_indentorder", "t_indentissue", "t_stocktransfer", "t_itemstockentry")); + TABLE_GROUPS.put(1, + Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", + "i_beneficiarycontacts", "i_beneficiarydetails", "i_beneficiaryfamilymapping", + "i_beneficiaryidentity", "i_beneficiarymapping")); + + TABLE_GROUPS.put(2, + Arrays.asList("t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", + "t_pnccare", "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", + "t_physicalactivity")); + + TABLE_GROUPS.put(3, + Arrays.asList("t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", + "t_sys_cardiovascular", "t_sys_respiratory", "t_sys_centralnervous", + "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem")); + + TABLE_GROUPS.put(4, + Arrays.asList("t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", + "t_benclinicalobservation", "t_prescription", "t_prescribeddrug", "t_lab_testorder", + "t_benreferdetails")); + + TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult", "t_physicalstockentry", "t_patientissue", + "t_facilityconsumption", "t_itemstockentry", "t_itemstockexit")); + + TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory")); + + TABLE_GROUPS.put(7, + Arrays.asList("t_cancerfamilyhistory", 
"t_cancerpersonalhistory", "t_cancerdiethistory", + "t_cancerobstetrichistory", "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", + "t_canceroralexamination", "t_cancerbreastexamination", "t_cancerabdominalexamination", + "t_cancergynecologicalexamination", "t_cancerdiagnosis", "t_cancerimageannotation")); - // Group 3: High volume transactional data - TABLE_GROUPS.put(3, Arrays.asList("i_beneficiarydetails", "t_patientissue", "t_physicalstockentry", - "t_stockadjustment", "t_saitemmapping", "t_patientreturn", - "t_facilityconsumption", "t_itemstockexit")); - // Add more groups as needed, up to 9 - TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); - - TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); - - TABLE_GROUPS.put(3, Arrays.asList("t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem")); - - TABLE_GROUPS.put(4, Arrays.asList("t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails")); - - TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit")); + TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); - TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory")); + TABLE_GROUPS.put(9, + Arrays.asList("t_itemstockentry", "t_itemstockexit", "t_patientissue", "t_physicalstockentry", + "t_stockadjustment", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentissue", "t_indentorder", "t_saitemmapping")); - TABLE_GROUPS.put(7, Arrays.asList("t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation")); + } - TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); - - TABLE_GROUPS.put(9, Arrays.asList("t_itemstockentry","t_itemstockexit","t_patientissue","t_physicalstockentry","t_stockadjustment","t_stocktransfer","t_patientreturn","t_facilityconsumption","t_indent","t_indentissue","t_indentorder","t_saitemmapping")); - - } + public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { - public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { - logger.info("Starting syncDataToServer. 
Token: {}", token); ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); - +List> dataToBesync = syncUploadDataDigester.getSyncData(); if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { logger.error("Invalid SyncUploadDataDigester object or tableName is null."); return "Error: Invalid sync request."; @@ -101,8 +113,7 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } - if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); if ("data sync passed".equals(result)) { return "Sync successful for i_beneficiarydetails."; @@ -111,28 +122,35 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t return "Sync failed for i_beneficiarydetails."; } } else { - // Determine the group for the current table or iterate through all if no specific table is given + // Determine the group for the current table or iterate through all if no + // specific table is given boolean syncSuccess = true; String errorMessage = ""; - // If a specific table is provided in the request, try to find its group and sync only that table. + // If a specific table is provided in the request, try to find its group and + // sync only that table. // Otherwise, iterate through all defined groups. if (syncTableName != null && !syncTableName.isEmpty()) { boolean foundInGroup = false; - for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { - if (entry.getValue().contains(syncTableName.toLowerCase())) { - logger.info("Attempting to sync table '{}' from Group {}", syncTableName, entry.getKey()); - syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, syncUploadDataDigester); + + for (Map map : dataToBesync) { + // if (entry.getValue().contains(syncTableName.toLowerCase())) { + if(map.get("tableName") != null + && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { + syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, + syncUploadDataDigester); foundInGroup = true; break; } } if (!foundInGroup) { - logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", syncTableName); + logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", + syncTableName); syncSuccess = performGenericTableSync(syncUploadDataDigester); } } else { - // If no specific table is in the request (e.g., a general sync trigger), iterate through groups + // If no specific table is in the request (e.g., a general sync trigger), + // iterate through groups logger.info("No specific table provided. Attempting to sync all tables group by group."); for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { Integer groupId = entry.getKey(); @@ -142,26 +160,34 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t try { // Create a new digester for each table within the group, // or adapt if the original digester contains data for multiple tables. - // For simplicity, assuming syncDataDigester needs to be tailored per table or group. 
- // If your requestOBJ contains data for only one table at a time, this loop might need adjustment + // For simplicity, assuming syncDataDigester needs to be tailored per table or + // group. + // If your requestOBJ contains data for only one table at a time, this loop + // might need adjustment // to fetch data for each table in the group. // For now, it will use the syncData from the original requestOBJ, which implies // the original requestOBJ should represent data for a single table. - // A more robust solution would involve fetching data for each table dynamically. - boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); + // A more robust solution would involve fetching data for each table + // dynamically. + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), + table, syncUploadDataDigester); if (!currentTableSyncResult) { syncSuccess = false; errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; - logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); - // Optionally, you can choose to break here or continue to sync other tables in the group/next group + logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, + errorMessage); + // Optionally, you can choose to break here or continue to sync other tables in + // the group/next group // For now, let's continue to attempt other tables within the group. } else { logger.info("Successfully synced table: {} in Group {}", table, groupId); } } catch (Exception e) { syncSuccess = false; - errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + e.getMessage() + ". "; - logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); + errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + + e.getMessage() + ". "; + logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, + e.getMessage(), e); // Continue to attempt other tables } } @@ -182,35 +208,35 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t * with relevant data for the `currentTableName` before calling this. * In a real-world scenario, you might fetch data for each table here. */ - private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { + private boolean syncTablesInGroup(String schemaName, String currentTableName, + SyncUploadDataDigester originalDigester) { logger.info("Attempting generic sync for table: {}", currentTableName); // This is a simplification. In a production system, you would likely need // to retrieve the actual data for 'currentTableName' from the local DB // based on the group sync approach. For this example, we'll assume the // originalDigester's syncData is relevant or needs to be re-populated. 
- // Create a new digester instance or modify the existing one for the current table + // Create a new digester instance or modify the existing one for the current + // table SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); - tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is generic or set per table + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is + // generic or set per table // !!! IMPORTANT: You'll need to fetch the data for 'currentTableName' from your local DB here. // The `originalDigester.getSyncData()` might not be correct for all tables in a group. // For demonstration, I'm just using the original digester's data, which is likely incorrect - // if you're syncing multiple tables from a single request. - // You'll need a method like: dataSyncRepositoryLocal.getDataForTable(currentTableName, ...) - tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching - + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); return performGenericTableSync(tableSpecificDigester); } - - private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUploadDataDigester syncUploadDataDigester) { - logger.info("Processing update_M_BeneficiaryRegIdMapping_for_provisioned_benID for table: {}", syncUploadDataDigester.getTableName()); + private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( + SyncUploadDataDigester syncUploadDataDigester) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -226,20 +252,24 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload objArr[3] = String.valueOf(map.get("VanID")); syncData.add(objArr); } else { - logger.warn("Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", map); + logger.warn( + "Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", + map); } } if (!syncData.isEmpty()) { try { int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), SERVER_COLUMNS_NOT_REQUIRED, query, syncData); + syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, syncData); if (i.length == syncData.size()) { logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); return "data sync passed"; } else { - logger.error("Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", syncData.size(), i.length, getFailedRecords(i, syncData)); + logger.error( + "Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", + syncData.size(), i.length, getFailedRecords(i, syncData)); return "Partial data sync for m_beneficiaryregidmapping."; } } catch (Exception e) { @@ -263,48 +293,47 @@ private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String t queryBuilder.append(" BeneficiaryID = ? 
"); queryBuilder.append(" AND "); queryBuilder.append(" VanID = ? "); + return queryBuilder.toString(); } public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { - logger.info("Processing update_I_BeneficiaryDetails_for_processed_in_batches for table: {}", syncUploadDataDigester.getTableName()); - List syncData = new ArrayList<>(); + List syncData = new ArrayList<>(); - String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); + String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); - int limit = 1000; - int offset = 0; - int totalProcessed = 0; + int limit = 1000; + int offset = 0; + int totalProcessed = 0; - String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly + String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly while (true) { List> batch; try { - logger.info("DEBUG: Passing whereClause to getBatchForBenDetails: [{}]", problematicWhereClause); - - batch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), - syncUploadDataDigester.getServerColumns(), - problematicWhereClause, // Use the variable - limit, - offset); - } catch (Exception e) { - logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); - } - - if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a "success" - logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); - return "data sync passed"; - } else { - logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); - return "No data processed or sync failed for i_beneficiarydetails."; + + batch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + problematicWhereClause, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); + } + + if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a + // "success" + logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); + return "data sync passed"; + } else { + logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); + return "No data processed or sync failed for i_beneficiarydetails."; + } } } - } + private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { StringBuilder queryBuilder = new StringBuilder(" UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); @@ -317,12 +346,11 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN return queryBuilder.toString(); } - /** - * Handles the generic synchronization logic for tables not covered by specific handlers. + * Handles the generic synchronization logic for tables not covered by specific + * handlers. 
 */
    private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) {
-        logger.info("Performing generic sync for table: {}", syncUploadDataDigester.getTableName());
        List> dataToBesync = syncUploadDataDigester.getSyncData();
        List syncDataListInsert = new ArrayList<>();
        List syncDataListUpdate = new ArrayList<>();
@@ -342,10 +370,16 @@ private boolean performGenericTableSync(SyncUploadDataDig
            String vanID = String.valueOf(map.get("VanID"));
            int syncFacilityID = 0;
            // Update SyncedBy and SyncedDate in the map itself before processing
            map.put("SyncedBy", syncUploadDataDigester.getSyncedBy());
            map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB
-
+            if (map.get("CreatedDate") == null || map.get("created_date") == null) {
+                logger.info("CreatedDate was null for table: " + syncTableName + ", inserting current time");
+                if (map.get("CreatedDate") == null)
+                    map.put("CreatedDate", String.valueOf(LocalDateTime.now()));
+                if (map.get("created_date") == null)
+                    map.put("created_date", String.valueOf(LocalDateTime.now()));
+            }
            // Facility ID processing
            if (facilityIDFromDigester != null) {
                // Determine the 'Processed' status based on facility ID for specific tables
@@ -370,7 +404,8 @@ private boolean performGenericTableSync(SyncUploadDataDig
                        break;
                    }
                    case "t_stocktransfer": {
-                        if (map.containsKey("TransferToFacilityID") && map.get("TransferToFacilityID") instanceof Double) {
+                        if (map.containsKey("TransferToFacilityID")
+                                && map.get("TransferToFacilityID") instanceof Double) {
                            Double transferToFacilityID = (Double) map.get("TransferToFacilityID");
                            if (transferToFacilityID.intValue() == facilityIDFromDigester) {
                                map.put("Processed", "P");
@@ -388,7 +423,8 @@ private boolean performGenericTableSync(SyncUploadDataDig
                        break;
                    }
                    default:
-                        // No specific facility ID logic for other tables, maintain existing 'Processed' status or default
+                        // No specific facility ID logic for other tables, maintain existing 'Processed'
+                        // status or default
                        break;
                }
            }
@@ -400,13 +436,13 @@ private boolean performGenericTableSync(SyncUploadDataDig
                syncFacilityID = ((Double) map.get("SyncFacilityID")).intValue();
            }
-
            int recordCheck;
            try {
                recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot(
                        schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID);
            } catch (Exception e) {
-                logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e);
+                logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); return false; // Critical error, stop sync for this table } @@ -417,7 +453,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig for (String column : serverColumnsList) { Object value = map.get(column.trim()); - // Handle boolean conversion if necessary, though String.valueOf should generally work for prepared statements + // Handle boolean conversion if necessary, though String.valueOf should + // generally work for prepared statements if (value instanceof Boolean) { currentRecordValues.add(value); } else if (value != null) { @@ -453,11 +490,15 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig if (!syncDataListInsert.isEmpty()) { String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns()); + try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); if (i.length != syncDataListInsert.size()) { insertSuccess = false; - logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", syncTableName, syncDataListInsert.size(), i.length, getFailedRecords(i, syncDataListInsert)); + logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", + syncTableName, syncDataListInsert.size(), i.length, + getFailedRecords(i, syncDataListInsert)); } else { logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); } @@ -469,11 +510,15 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig if (!syncDataListUpdate.isEmpty()) { String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, syncUploadDataDigester.getServerColumns(), syncTableName); + // Ensure the update query is correct and matches the expected format try { - int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); if (j.length != syncDataListUpdate.size()) { updateSuccess = false; - logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", syncTableName, syncDataListUpdate.size(), j.length, getFailedRecords(j, syncDataListUpdate)); + logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}",
+                            syncTableName, syncDataListUpdate.size(), j.length,
+                            getFailedRecords(j, syncDataListUpdate));
                } else {
                    logger.info("Successfully updated {} records in table {}.", j.length, syncTableName);
                }
@@ -518,16 +563,24 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol
        StringBuilder preparedStatementSetter = new StringBuilder();
-        if (columnsArr != null && columnsArr.length > 0) {
+        if (columnsArr != null && columnsArr.length > 0) {
            for (int i = 0; i < columnsArr.length; i++) {
-                preparedStatementSetter.append(columnsArr[i].trim());
-                preparedStatementSetter.append(" = ?");
+                String columnName = columnsArr[i].trim();
+
+                // Special handling for CreatedDate - use COALESCE to prevent NULL
+                if (columnName.equalsIgnoreCase("CreatedDate")) {
+                    preparedStatementSetter.append(columnName);
+                    preparedStatementSetter.append(" = COALESCE(?, CURRENT_TIMESTAMP)");
+                } else {
+                    preparedStatementSetter.append(columnName);
+                    preparedStatementSetter.append(" = ?");
+                }
+
                if (i < columnsArr.length - 1) {
                    preparedStatementSetter.append(", ");
                }
            }
        }
-
        StringBuilder queryBuilder = new StringBuilder(" UPDATE ");
        queryBuilder.append(schemaName).append(".").append(tableName);
        queryBuilder.append(" SET ");
@@ -549,18 +602,21 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol
    private String getFailedRecords(int[] results, List data) {
        List failedRecordsInfo = new ArrayList<>();
        for (int k = 0; k < results.length; k++) {
-            // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or Statement.SUCCESS_NO_INFO
+            // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or
+            // Statement.SUCCESS_NO_INFO
            // usually indicates a failure or success without specific row count.
            // A common return value for success is 1 (for one row updated/inserted).
            if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure // Attempt to get some identifiable info from the failed record if (data.get(k).length > 0) { - failedRecordsInfo.add("Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); + failedRecordsInfo.add( + "Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); } else { failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); } } } + logger.info("Failed records info: {}", failedRecordsInfo); return String.join("; ", failedRecordsInfo); } } \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java b/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java index 1dba0e78..fbeab7ad 100644 --- a/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java +++ b/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java @@ -38,13 +38,10 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo String origin = request.getHeader("Origin"); - logger.debug("Incoming Origin: {}", origin); - logger.debug("Allowed Origins Configured: {}", allowedOrigins); - if (origin != null && isOriginAllowed(origin)) { response.setHeader("Access-Control-Allow-Origin", origin); response.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS"); - response.setHeader("Access-Control-Allow-Headers", "Authorization, Content-Type, Accept, Jwttoken"); + response.setHeader("Access-Control-Allow-Headers", "Authorization, Content-Type, Accept, Jwttoken,serverAuthorization, ServerAuthorization, serverauthorization, Serverauthorization"); response.setHeader("Vary", "Origin"); response.setHeader("Access-Control-Allow-Credentials", "true"); } else { @@ -59,7 +56,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo String path = request.getRequestURI(); String contextPath = request.getContextPath(); - logger.info("JwtUserIdValidationFilter invoked for path: " + path); // Log cookies for debugging Cookie[] cookies = request.getCookies(); @@ -76,7 +72,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo // Log headers for debugging String jwtTokenFromHeader = request.getHeader("Jwttoken"); - logger.info("JWT token from header: "); // Skip login and public endpoints if (path.equals(contextPath + "/user/userAuthenticate") @@ -104,7 +99,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo return; } } else if (jwtFromHeader != null) { - logger.info("Validating JWT token from header"); if (jwtAuthenticationUtil.validateUserIdAndJwtToken(jwtFromHeader)) { AuthorizationHeaderRequestWrapper authorizationHeaderRequestWrapper = new AuthorizationHeaderRequestWrapper( request, ""); @@ -113,7 +107,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo } } else { String userAgent = request.getHeader("User-Agent"); - logger.info("User-Agent: " + userAgent); if (userAgent != null && isMobileClient(userAgent) && authHeader != null) { try { UserAgentContext.setUserAgent(userAgent); @@ -123,6 +116,7 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo } return; } + } logger.warn("No valid authentication token found"); @@ -156,7 +150,8 @@ private boolean isMobileClient(String userAgent) { if (userAgent == null) return false; userAgent = userAgent.toLowerCase(); - return userAgent.contains("okhttp"); // iOS (custom clients) + logger.info("User-Agent: " + userAgent); + return 
userAgent.contains("okhttp") || userAgent.contains("java/"); // iOS (custom clients) } private String getJwtTokenFromCookies(HttpServletRequest request) { @@ -179,4 +174,4 @@ private void clearUserIdCookie(HttpServletResponse response) { cookie.setMaxAge(0); // Invalidate the cookie response.addCookie(cookie); } -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java index d1ea0efd..33556d22 100644 --- a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java +++ b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java @@ -39,11 +39,16 @@ public static HttpEntity createRequestEntity(Object body, String authori MultiValueMap headers = new LinkedMultiValueMap<>(); headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE + ";charset=utf-8"); - - if (authorization != null && !authorization.isEmpty()) { +logger.info("token: {}", jwtToken); + if (authorization != null && !authorization.isEmpty() && !jwtToken.equalsIgnoreCase("datasync")) { headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + authorization); } + if(authorization != null && !authorization.isEmpty() && jwtToken.equalsIgnoreCase("datasync")) + { + headers.add(HttpHeaders.AUTHORIZATION, authorization); + } + if (jwtToken == null || jwtToken.isEmpty()) { ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); @@ -55,14 +60,11 @@ public static HttpEntity createRequestEntity(Object body, String authori logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); } } - } + } - - if (jwtToken != null && !jwtToken.isEmpty()) { + if (jwtToken != null && !jwtToken.isEmpty() && !jwtToken.equalsIgnoreCase("datasync")) { headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken); } - - return new HttpEntity<>(body, headers); } } \ No newline at end of file From 2828fe6b68035e4f0b7c1a9937312cc622cc91f5 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Mon, 11 Aug 2025 09:54:47 +0530 Subject: [PATCH 18/23] fix: remove the token for server authorization (#111) --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index 252c9ce7..afb42a4b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -186,7 +186,7 @@ private int downloadDataFromServer(SyncDownloadMaster syncDownloadMaster,String // initializing RestTemplate RestTemplate restTemplate = new RestTemplate(); // Provide the required second argument, e.g., an empty string or appropriate authorization token - HttpEntity request = RestTemplateUtil.createRequestEntity(syncDownloadMaster, ServerAuthorization, jwtToken); + HttpEntity request = RestTemplateUtil.createRequestEntity(syncDownloadMaster, ServerAuthorization, "datasync"); // Call rest-template to call API to download master data for given table ResponseEntity response = restTemplate.exchange(dataSyncDownloadUrl, HttpMethod.POST, request, String.class); From 604bd641da7f80d28bab6a9b7b167aab36ed3753 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Thu, 14 Aug 2025 10:42:43 +0530 Subject: [PATCH 19/23] 
Fix the datasync Demographics Issue (#112) * fix: remove condition for i_beneficiarydetails * fix: add logs * fix: add logs * fix: remove db_iemr * fix: add log for show column names too * fix: add date-format condition * fix: change valid column name * fix: change valid column name * fix: change valid column name * fix: change valid column name * fix: update insert query * fix: update cleaned column list * fix: date conversion * fix: conversion date-time * fix: add date conversion * fix: logs added * fix: new logger * fix: revert the date condition * fix: revert insert code * fix: revert insert code * fix: date format issue * fix: logs add * fix: log for group and group lsit * fix: clean the code --------- Co-authored-by: vishwab1 --- .../location/DistrictBlockMasterRepo.java | 2 +- .../dataSyncActivity/DataSyncRepository.java | 21 +- .../UploadDataToServerImpl.java | 17 +- .../DataSyncRepositoryCentral.java | 28 +- .../GetDataFromVanAndSyncToDBImpl.java | 454 ++++++++---------- 5 files changed, 242 insertions(+), 280 deletions(-) diff --git a/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java b/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java index db6a9d40..97452c65 100644 --- a/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java +++ b/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java @@ -36,7 +36,7 @@ public interface DistrictBlockMasterRepo extends CrudRepository getDistrictBlockMaster(@Param("districtID") Integer districtID); - @Query(value = " SELECT distinct StateID, StateName,WorkingDistrictID,WorkingDistrictName,blockid,blockname,villageid,villagename FROM db_iemr.v_userservicerolemapping WHERE UserID = :userId and UserServciceRoleDeleted is false",nativeQuery = true) + @Query(value = " SELECT distinct StateID, StateName,WorkingDistrictID,WorkingDistrictName,blockid,blockname,villageid,villagename FROM v_userservicerolemapping WHERE UserID = :userId and UserServciceRoleDeleted is false",nativeQuery = true) public List getUserservicerolemapping(@Param("userId") Integer userId); } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index 9b020a4e..f7a55d4a 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -88,7 +88,12 @@ public List> getDataForGivenSchemaAndTable(String schema, St } } - + + logger.info("Select Query started:"); + logger.info("Table Name: {}", table); + + logger.info("Select Query: {}", baseQuery); + resultSetList = jdbcTemplate.queryForList(baseQuery); return resultSetList; } @@ -98,14 +103,16 @@ public int updateProcessedFlagInVan(String schemaName, String tableName, StringB jdbcTemplate = getJdbcTemplate(); String query = ""; + logger.info("Updating processed flag in table: " + tableName + " for vanSerialNos: " + vanSerialNos); + if (tableName != null && tableName.toLowerCase().equals("i_ben_flow_outreach")) { - query = "UPDATE " + schemaName + "." + tableName - + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + query = "UPDATE " + schemaName + "." + tableName + + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; } else { - query = "UPDATE " + schemaName + "." 
+ tableName - + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + query = "UPDATE " + schemaName + "." + tableName + + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; } Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 2c24dc2c..51d8a32b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -115,7 +115,7 @@ public String getDataToSyncToServer(int vanID, String user, String Authorization public String syncIntercepter(int vanID, String user, String Authorization, String token) throws Exception { // sync activity trigger - + String serverAcknowledgement = startDataSync(vanID, user, Authorization, token); return serverAcknowledgement; @@ -226,7 +226,7 @@ private String startDataSync(int vanID, String user, String Authorization, Strin Map response = new HashMap<>(); response.put("response", "Data sync failed"); response.put("groupsProgress", responseStatus); - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response); + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response); return objectMapper.writerWithDefaultPrettyPrinter() .writeValueAsString(Collections.singletonMap("data", response)); } else { @@ -275,6 +275,7 @@ public List getVanAndServerColumnList(Integer groupID) throws private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { + logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { @@ -316,11 +317,11 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, String token) + String serverColumns, List> dataToBesync, String user, String Authorization, + String token) throws Exception { - + RestTemplate restTemplate = new RestTemplate(); - Integer facilityID = masterVanRepo.getFacilityID(vanID); @@ -340,10 +341,10 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization,"datasync"); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); - + int i = 0; if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); @@ -392,4 +393,4 @@ public String getDataSyncGroupDetails() { return null; } -} \ No newline at end of file +} diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index a5d8422d..6dba4611 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ 
b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -88,19 +88,33 @@ private boolean isValidTableName(String tableName) { } private boolean isValidColumnNamesList(String columnNames) { - if (columnNames == null || columnNames.trim().isEmpty()) { - return false; - } - for (String col : columnNames.split(",")) { - if (!isValidDatabaseIdentifierCharacter(col.trim())) { - return false; + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + logger.info("Validating column names: {}", columnNames); + for (String col : columnNames.split(",")) { + String trimmed = col.trim(); + + // Handle date_format(...) style + if (trimmed.toLowerCase().startsWith("date_format(")) { + int openParenIndex = trimmed.indexOf("("); + int commaIndex = trimmed.indexOf(",", openParenIndex); + if (commaIndex > 0) { + trimmed = trimmed.substring(openParenIndex + 1, commaIndex).trim(); } } - return true; + + if (!isValidDatabaseIdentifierCharacter(trimmed)) { + return false; + } } + return true; +} + public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, String vanAutoIncColumnName, int syncFacilityID) { + jdbcTemplate = getJdbcTemplate(); List params = new ArrayList<>(); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index cd33c2b5..fcc68fd3 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -27,7 +27,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,7 +45,6 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; - private static final Map> TABLE_GROUPS = new HashMap<>(); static { TABLE_GROUPS.put(1, @@ -96,14 +94,15 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); -List> dataToBesync = syncUploadDataDigester.getSyncData(); + List> dataToBesync = syncUploadDataDigester.getSyncData(); + logger.info("Data to be synced: {}", dataToBesync); if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { logger.error("Invalid SyncUploadDataDigester object or tableName is null."); return "Error: Invalid sync request."; } String syncTableName = syncUploadDataDigester.getTableName(); - + logger.info("Syncing data for table: {}", syncTableName); // Handle specific tables first, if their logic is distinct if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); @@ -113,29 +112,15 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { - String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); - if ("data sync 
passed".equals(result)) { - return "Sync successful for i_beneficiarydetails."; - } else { - logger.error("Sync failed for i_beneficiarydetails: {}", result); - return "Sync failed for i_beneficiarydetails."; - } - } else { - // Determine the group for the current table or iterate through all if no - // specific table is given + } + else { boolean syncSuccess = true; String errorMessage = ""; - - // If a specific table is provided in the request, try to find its group and - // sync only that table. - // Otherwise, iterate through all defined groups. if (syncTableName != null && !syncTableName.isEmpty()) { boolean foundInGroup = false; - - for (Map map : dataToBesync) { - // if (entry.getValue().contains(syncTableName.toLowerCase())) { - if(map.get("tableName") != null + + for (Map map : dataToBesync) { + if (map.get("tableName") != null && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, syncUploadDataDigester); @@ -149,26 +134,13 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E syncSuccess = performGenericTableSync(syncUploadDataDigester); } } else { - // If no specific table is in the request (e.g., a general sync trigger), - // iterate through groups - logger.info("No specific table provided. Attempting to sync all tables group by group."); + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { Integer groupId = entry.getKey(); List tablesInGroup = entry.getValue(); - logger.info("Starting sync for Group {}", groupId); for (String table : tablesInGroup) { try { - // Create a new digester for each table within the group, - // or adapt if the original digester contains data for multiple tables. - // For simplicity, assuming syncDataDigester needs to be tailored per table or - // group. - // If your requestOBJ contains data for only one table at a time, this loop - // might need adjustment - // to fetch data for each table in the group. - // For now, it will use the syncData from the original requestOBJ, which implies - // the original requestOBJ should represent data for a single table. - // A more robust solution would involve fetching data for each table - // dynamically. + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); if (!currentTableSyncResult) { @@ -176,9 +148,7 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); - // Optionally, you can choose to break here or continue to sync other tables in - // the group/next group - // For now, let's continue to attempt other tables within the group. + } else { logger.info("Successfully synced table: {} in Group {}", table, groupId); } @@ -188,7 +158,6 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E + e.getMessage() + ". "; logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); - // Continue to attempt other tables } } } @@ -202,41 +171,22 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E } } - /** - * Helper method to sync tables belonging to a specific group. - * This method assumes that the `syncUploadDataDigester` will be populated - * with relevant data for the `currentTableName` before calling this. 
- * In a real-world scenario, you might fetch data for each table here. - */ private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { - logger.info("Attempting generic sync for table: {}", currentTableName); - // This is a simplification. In a production system, you would likely need - // to retrieve the actual data for 'currentTableName' from the local DB - // based on the group sync approach. For this example, we'll assume the - // originalDigester's syncData is relevant or needs to be re-populated. - - // Create a new digester instance or modify the existing one for the current - // table SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); - tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is - // generic or set per table - - // !!! IMPORTANT: You'll need to fetch the data for 'currentTableName' from your local DB here. - // The `originalDigester.getSyncData()` might not be correct for all tables in a group. - // For demonstration, I'm just using the original digester's data, which is likely incorrect + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); tableSpecificDigester.setSyncData(originalDigester.getSyncData()); return performGenericTableSync(tableSpecificDigester); } private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( SyncUploadDataDigester syncUploadDataDigester) { - + List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -261,7 +211,8 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( if (!syncData.isEmpty()) { try { int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, syncData); + syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, + syncData); if (i.length == syncData.size()) { logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); @@ -309,10 +260,10 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly - while (true) { - List> batch; - try { - + while (true) { + List> batch; + try { + batch = dataSyncRepositoryCentral.getBatchForBenDetails( syncUploadDataDigester, problematicWhereClause, @@ -350,210 +301,215 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN * Handles the generic synchronization logic for tables not covered by specific * handlers. 
*/ - private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - if (dataToBesync == null || dataToBesync.isEmpty()) { - logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); - return true; // Nothing to sync, consider it a success - } + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; // Nothing to sync, consider it a success + } - for (Map map : dataToBesync) { - String vanSerialNo = String.valueOf(map.get(vanAutoIncColumnName)); - String vanID = String.valueOf(map.get("VanID")); - int syncFacilityID = 0; - - // Update SyncedBy and SyncedDate in the xmap itself before processing - map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB - if (map.get("CreatedDate") == null || map.get("created_date") == null) { - logger.info("CreatedDate was null for table: " + syncTableName + ", inserting current time"); - if(map.get("CreatedDate") == null) - map.put("CreatedDate", String.valueOf(LocalDateTime.now())); - if(map.get("created_date") == null) - map.put("created_date", String.valueOf(LocalDateTime.now())); + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + String serverColumns = syncUploadDataDigester.getServerColumns(); + + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + for (Map map : dataToBesync) { + // Create a new map with clean column names as keys + Map cleanRecord = new HashMap<>(); + for (String key : map.keySet()) { + String cleanKey = key; + // Handle keys with SQL functions like date_format + if (key.startsWith("date_format(") && key.endsWith(")")) { + int start = key.indexOf("(") + 1; + int end = key.indexOf(","); + if (end > start) { + cleanKey = key.substring(start, end).trim(); + } else { + // Fallback if format is unexpected + cleanKey = key.substring(start, key.indexOf(")")).trim(); } - // Facility ID processing - if (facilityIDFromDigester != null) { - // Determine the 'Processed' status based on facility ID for specific tables - switch (syncTableName.toLowerCase()) { - case "t_indent": - case "t_indentorder": { - if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") instanceof Double) { - Double fromFacilityID = (Double) map.get("FromFacilityID"); - if (fromFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + } + cleanRecord.put(cleanKey.trim(), map.get(key)); + } + + String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); 
+ String vanID = String.valueOf(cleanRecord.get("VanID")); + int syncFacilityID = 0; + + // Update SyncedBy and SyncedDate in the xmap itself before processing + cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { + Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_indentissue": { - if (map.containsKey("ToFacilityID") && map.get("ToFacilityID") instanceof Double) { - Double toFacilityID = (Double) map.get("ToFacilityID"); - if (toFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + break; + } + case "t_indentissue": { + if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { + Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_stocktransfer": { - if (map.containsKey("TransferToFacilityID") - && map.get("TransferToFacilityID") instanceof Double) { - Double transferToFacilityID = (Double) map.get("TransferToFacilityID"); - if (transferToFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + break; + } + case "t_stocktransfer": { + if (cleanRecord.containsKey("TransferToFacilityID") + && cleanRecord.get("TransferToFacilityID") instanceof Number) { + Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_itemstockentry": { - if (map.containsKey("FacilityID") && map.get("FacilityID") instanceof Double) { - Double mapFacilityID = (Double) map.get("FacilityID"); - if (mapFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + break; + } + case "t_itemstockentry": { + if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { + Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + if (mapFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - default: - // No specific facility ID logic for other tables, maintain existing 'Processed' - // status or default - break; + break; } + default: + // No specific facility ID logic for other tables + break; } + } - // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Integer) { - syncFacilityID = (Integer) map.get("SyncFacilityID"); - } else if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Double) { - syncFacilityID = ((Double) map.get("SyncFacilityID")).intValue(); - } + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { + syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + } - int recordCheck; - try { - recordCheck = 
dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); - } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", - syncTableName, vanSerialNo, vanID, e.getMessage(), e); - return false; // Critical error, stop sync for this table - } + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); + return false; // Critical error, stop sync for this table + } - // Prepare Object array for insert/update - Object[] objArr; - List serverColumnsList = Arrays.asList(syncUploadDataDigester.getServerColumns().split(",")); - List currentRecordValues = new ArrayList<>(); - - for (String column : serverColumnsList) { - Object value = map.get(column.trim()); - // Handle boolean conversion if necessary, though String.valueOf should - // generally work for prepared statements - if (value instanceof Boolean) { - currentRecordValues.add(value); - } else if (value != null) { - currentRecordValues.add(String.valueOf(value)); - } else { - currentRecordValues.add(null); - } + // Prepare Object array for insert/update + List currentRecordValues = new ArrayList<>(); + for (String column : serverColumnsList) { + Object value = cleanRecord.get(column.trim()); + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); } + } - objArr = currentRecordValues.toArray(); - - if (recordCheck == 0) { - syncDataListInsert.add(objArr); + Object[] objArr = currentRecordValues.toArray(); + if (recordCheck == 0) { + syncDataListInsert.add(objArr); + } else { + // For update, append the WHERE clause parameters at the end of the array + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); } else { - // For update, append the WHERE clause parameters at the end of the array - List updateParams = new ArrayList<>(Arrays.asList(objArr)); - updateParams.add(String.valueOf(vanSerialNo)); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(syncTableName.toLowerCase()) && map.containsKey("SyncFacilityID")) { - updateParams.add(String.valueOf(map.get("SyncFacilityID"))); - } else { - updateParams.add(String.valueOf(vanID)); - } - syncDataListUpdate.add(updateParams.toArray()); + updateParams.add(String.valueOf(vanID)); } + syncDataListUpdate.add(updateParams.toArray()); } + } - boolean insertSuccess = true; - boolean updateSuccess = true; + boolean insertSuccess = true; 
+ boolean updateSuccess = true; - if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns()); - - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); - if (i.length != syncDataListInsert.size()) { - insertSuccess = false; - logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", - syncTableName, syncDataListInsert.size(), i.length, - getFailedRecords(i, syncDataListInsert)); - } else { - logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); - } - } catch (Exception e) { + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryInsert, syncDataListInsert); + if (i.length != syncDataListInsert.size()) { insertSuccess = false; - logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", + syncTableName, syncDataListInsert.size(), i.length, + getFailedRecords(i, syncDataListInsert)); + } else { + logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); } + } catch (Exception e) { + insertSuccess = false; + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); } + } - if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, syncUploadDataDigester.getServerColumns(), syncTableName); - // Ensure the update query is correct and matches the expected format - try { - int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - if (j.length != syncDataListUpdate.size()) { - updateSuccess = false; - logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", - syncTableName, syncDataListUpdate.size(), j.length, - getFailedRecords(j, syncDataListUpdate)); - } else { - logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); - } - } catch (Exception e) { + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + try { + int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + if (j.length != syncDataListUpdate.size()) { updateSuccess = false; - logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", + syncTableName, syncDataListUpdate.size(), j.length, + getFailedRecords(j, syncDataListUpdate)); + } else { + logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); } + } catch (Exception e) { + updateSuccess = false; + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); } - return insertSuccess && updateSuccess; + } + return insertSuccess && updateSuccess; +} + private String getQueryToInsertDataToServerDB(String schemaName, String + tableName, String serverColumns) { + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); + + StringBuilder preparedStatementSetter = new StringBuilder(); + + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } } - private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } - } - - StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append("("); - queryBuilder.append(serverColumns); - queryBuilder.append(") VALUES ("); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(")"); - return queryBuilder.toString(); + StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append("("); + queryBuilder.append(serverColumns); + queryBuilder.append(") VALUES ("); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(")"); + return queryBuilder.toString(); } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { @@ -563,24 +519,6 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol StringBuilder preparedStatementSetter = new StringBuilder(); - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - String columnName = columnsArr[i].trim(); // ← NEW LINE - - // Special handling for CreatedDate - use COALESCE to prevent NULL - if (columnName.equalsIgnoreCase("CreatedDate")) { // ← NEW BLOCK - preparedStatementSetter.append(columnName); - preparedStatementSetter.append(" = COALESCE(?, CURRENT_TIMESTAMP)"); - } else { - preparedStatementSetter.append(columnName); - preparedStatementSetter.append(" = ?"); - } - - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } - } StringBuilder queryBuilder = new StringBuilder(" UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); queryBuilder.append(" SET "); @@ -619,4 +557,6 @@ private String getFailedRecords(int[] results, List data) { logger.info("Failed records info: {}", failedRecordsInfo); return String.join("; ", failedRecordsInfo); } + + } \ No newline at end of file From 62a5a18b9d1f2a73c7d5ee47e08a3a54e3cdd0b9 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Tue, 19 
Aug 2025 22:19:09 +0530 Subject: [PATCH 20/23] Fix the token issue for Ben-gen id generation (#114) * fix: update server authorization for bengen * fix: update server authorization for bengen --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index afb42a4b..dd389c38 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -333,7 +333,7 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri int i = 0; // Rest template RestTemplate restTemplate = new RestTemplate(); - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization,token); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, ServerAuthorization,"datasync"); // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response = restTemplate.exchange(benGenUrlCentral, HttpMethod.POST, request, String.class); @@ -342,7 +342,7 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data - HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, token); + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), ServerAuthorization, "datasync"); i = 1; // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response1 = restTemplate.exchange(benImportUrlLocal, HttpMethod.POST, request1, From d592f6a5884f9f412414b3f535b18b9d004b6599 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 20 Aug 2025 10:59:03 +0530 Subject: [PATCH 21/23] fix: replace authorization for local api call (#116) --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index dd389c38..13070e18 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -342,7 +342,7 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data - HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), ServerAuthorization, "datasync"); + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, "datasync"); i = 1; // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response1 = restTemplate.exchange(benImportUrlLocal, HttpMethod.POST, request1, From 
26bf74c1fb1455a7f36aeae89a6bb1bdb61ffd62 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 20 Aug 2025 15:33:47 +0530 Subject: [PATCH 22/23] fix: add logs (#117) --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index 13070e18..dbaadbad 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -337,11 +337,13 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response = restTemplate.exchange(benGenUrlCentral, HttpMethod.POST, request, String.class); - +logger.info("Respponse from central API: " + response); +logger.info("Import url="+benImportUrlLocal); if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, "datasync"); i = 1; // Call rest-template to call central API to generate UNIQUE ID at central From 9f7fb1cc69c89bf63b41dc3fbc426f303a370a61 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 22 Aug 2025 11:14:32 +0530 Subject: [PATCH 23/23] Fix the BenGen ID Issue (#118) * fix: add logs to check the identity-api * fix: add logs * fix: add logs --- .../dataSyncActivity/StartSyncActivity.java | 5 +++-- .../DownloadDataFromServerImpl.java | 21 ++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java b/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java index 5f08fa9d..1e43ed66 100644 --- a/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java +++ b/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java @@ -25,7 +25,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; - import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; @@ -37,8 +36,8 @@ import com.iemr.mmu.service.dataSyncActivity.DownloadDataFromServerImpl; import com.iemr.mmu.service.dataSyncActivity.DownloadDataFromServerTransactionalImpl; import com.iemr.mmu.service.dataSyncActivity.UploadDataToServerImpl; -import com.iemr.mmu.utils.response.OutputResponse; import com.iemr.mmu.utils.CookieUtil; +import com.iemr.mmu.utils.response.OutputResponse; import io.swagger.v3.oas.annotations.Operation; import jakarta.servlet.http.HttpServletRequest; @@ -181,6 +180,8 @@ public String callCentralAPIToGenerateBenIDAndimportToLocal(@RequestBody String OutputResponse response = new OutputResponse(); try { String jwtToken = CookieUtil.getJwtTokenFromCookie(request); + logger.info("Authorization from controller="+ authorization); + int i = 
downloadDataFromServerImpl.callCentralAPIToGenerateBenIDAndimportToLocal(requestOBJ, authorization, serverAuthorization, jwtToken); if (i == 0) { diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index dbaadbad..6e7b1c91 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -330,25 +330,29 @@ public Map getDownloadStatus() { public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, String Authorization, String ServerAuthorization, String token) throws Exception { - int i = 0; + int i = 0, i1 = 0; + try{ // Rest template RestTemplate restTemplate = new RestTemplate(); HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, ServerAuthorization,"datasync"); // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response = restTemplate.exchange(benGenUrlCentral, HttpMethod.POST, request, String.class); -logger.info("Respponse from central API: " + response); -logger.info("Import url="+benImportUrlLocal); + logger.info("Authorization before calling local api="+Authorization); + logger.info("Import url="+benImportUrlLocal); if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data - HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, "datasync"); + logger.info("Authorization: " + Authorization); + logger.info("ServerAuthorization: " + ServerAuthorization); + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, token); i = 1; - // Call rest-template to call central API to generate UNIQUE ID at central + logger.info("Request to benImporturllocal: " + request1); ResponseEntity response1 = restTemplate.exchange(benImportUrlLocal, HttpMethod.POST, request1, String.class); + logger.info("Response from benImportUrlLocal: " + response1); if (response1 != null && response1.hasBody()) { JSONObject obj1 = new JSONObject(response1.getBody()); if (obj1 != null && obj1.has("data") && obj1.has("statusCode") @@ -357,9 +361,12 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri } } - } + } } - + } catch (Exception e) { + logger.error("Error while generating catch UNIQUE_ID at central server: " + e.getMessage()); + throw new Exception("Error while generating catch UNIQUE_ID at central server: " + e.getMessage()); + } return i; } }
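
Note on the request-entity convention running through patches 17-23: RestTemplateUtil.createRequestEntity now appears to treat the literal jwtToken value "datasync" as a marker for van-to-server sync calls. For those calls the server key is placed in the Authorization header as-is (no "Bearer " prefix) and no Jwttoken cookie is attached; ordinary application calls keep the Bearer prefix and forward the cookie. The sketch below restates that selection logic in one place. It is illustrative only: the class name HeaderSelectionSketch and the constant DATASYNC_MARKER are not part of the codebase, and the sketch compares against the constant so a null jwtToken simply falls through to the non-sync branch. In the patched method itself the marker comparison runs before the null check on jwtToken, so callers seem to be expected to always pass a non-null token (either "datasync" or the cookie value).

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

public final class HeaderSelectionSketch {

    // Assumed sentinel value, taken from the patches above.
    private static final String DATASYNC_MARKER = "datasync";

    public static <T> HttpEntity<T> buildEntity(T body, String authorization, String jwtToken) {
        MultiValueMap<String, String> headers = new LinkedMultiValueMap<>();
        headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE + ";charset=utf-8");

        boolean isDataSyncCall = DATASYNC_MARKER.equalsIgnoreCase(jwtToken);

        if (authorization != null && !authorization.isEmpty()) {
            // Sync calls send the server key unchanged; normal calls use a Bearer token.
            headers.add(HttpHeaders.AUTHORIZATION,
                    isDataSyncCall ? authorization : "Bearer " + authorization);
        }

        if (jwtToken != null && !jwtToken.isEmpty() && !isDataSyncCall) {
            // Only browser-originated calls forward the JWT cookie.
            headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken);
        }

        return new HttpEntity<>(body, headers);
    }
}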
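
Note on the column-name cleaning introduced in patch 19: both DataSyncRepositoryCentral.isValidColumnNamesList and GetDataFromVanAndSyncToDBImpl.performGenericTableSync unwrap select-list entries of the form date_format(Column, '%...') back to the bare column name before validating the identifier or looking up record values. A minimal, self-contained restatement of that idea follows; the class and method names are illustrative, not taken from the codebase.

public final class ColumnNameCleaner {

    /** Returns the bare column name for a select-list entry, unwrapping date_format(...) if present. */
    public static String cleanKey(String key) {
        String trimmed = key.trim();
        if (trimmed.toLowerCase().startsWith("date_format(")) {
            int open = trimmed.indexOf('(');
            int comma = trimmed.indexOf(',', open);
            int close = trimmed.indexOf(')', open);
            // Prefer the comma before the format argument; fall back to the closing parenthesis.
            int end = comma > open ? comma : close;
            if (end > open) {
                return trimmed.substring(open + 1, end).trim();
            }
        }
        return trimmed;
    }

    public static void main(String[] args) {
        // Prints "CreatedDate" and "BeneficiaryRegID"; plain names pass through unchanged.
        System.out.println(cleanKey("date_format(CreatedDate, '%Y-%m-%d %H:%i:%s')"));
        System.out.println(cleanKey(" BeneficiaryRegID "));
    }
}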