From a63271e142b4ae26d3ac31c2f108c661e09fae29 Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Mon, 17 Aug 2020 14:19:35 +1000 Subject: [PATCH 01/19] add images to dwca-export --- .../ala/biocache/export/DwCAExporter.scala | 108 ++++++------------ 1 file changed, 34 insertions(+), 74 deletions(-) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index 28ec6aae0..fda382336 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -12,10 +12,11 @@ import org.apache.commons.io.{FileUtils, IOUtils} import org.apache.commons.lang.StringUtils import org.slf4j.LoggerFactory -import scala.collection.mutable +import scala.collection.{immutable, mutable} import scala.collection.mutable.ListBuffer import scala.io.Source import scala.util.parsing.json.JSON +import scala.xml.Elem /** * Companion object for the DwCAExporter class. @@ -224,7 +225,6 @@ object DwCAExporter extends Tool { (Map.empty[String, String], Map.empty[String, String]) } - if (!dr.isEmpty && resourceIDs.contains(dr) && dateDeleted.isEmpty) { // Record is not deleted val dataResourceMap = dataResource2OutputStreams.get(dr) if (!dataResourceMap.isEmpty && !dataResourceMap.get.isEmpty) { @@ -402,17 +402,7 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { def addMeta(zop: ZipOutputStream) = { zop.putNextEntry(new ZipEntry("meta.xml")) val fieldsSeq = (fieldList - "dataResourceUid" - "classs" - "rowkey").keySet.toIndexedSeq - val metaXml = - - - occurrence.csv - - {fieldsSeq.zipWithIndex.map { - case (field, index) => - - }} - - + val metaXml: Elem = buildCoreMetaXml(fieldsSeq, List("Multimedia")) //add the XML zop.write("""""".getBytes) zop.write("\n".getBytes) @@ -421,73 +411,43 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { zop.closeEntry } - def addMetaWithMultimedia(zop: ZipOutputStream) = { - zop.putNextEntry(new ZipEntry("meta.xml")) + private def buildCoreMetaXml(fieldsSeq: immutable.IndexedSeq[String], extensions: List[String]) = { val metaXml = occurrence.csv - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - image.csv - - - - - - - - - - - - - + {fieldsSeq.zipWithIndex.map { + case (field, index) => + + }} + {extensions.map { + case "Multimedia" => + + + image.csv + + + + + + + + + + + + + + }} + metaXml + } + + def addMetaWithMultimedia(zop: ZipOutputStream) = { + zop.putNextEntry(new ZipEntry("meta.xml")) + val fieldsSeq = (fieldList - "dataResourceUid" - "classs" - "rowkey").keySet.toIndexedSeq + val metaXml: Elem = buildCoreMetaXml(fieldsSeq, List("Multimedia")) //add the XML zop.write("""""".getBytes) zop.write("\n".getBytes) From b849aa0bd67af3e7f411ab3c2ad9a3671e487854 Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Thu, 20 Aug 2020 21:08:21 +1000 Subject: [PATCH 02/19] Fixed the bug for AtlasOfLivingAustralia/la-pipelines#130 --- .../ala/biocache/export/DwCAExporter.scala | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index fda382336..3b2ab510c 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -369,7 +369,7 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { 
FileUtils.forceMkdir(zipFile.getParentFile) val zop = new ZipOutputStream(new FileOutputStream(zipFile)) if (addEML(zop, dataResource)) { - addMeta(zop) + addMeta(zop, List()) zop.putNextEntry(new ZipEntry("occurrence.csv")) val occWriter = new CSVWriter(new OutputStreamWriter(zop), ',', '"', lineEnd) Some((zop, occWriter)) @@ -398,20 +398,7 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { false } } - - def addMeta(zop: ZipOutputStream) = { - zop.putNextEntry(new ZipEntry("meta.xml")) - val fieldsSeq = (fieldList - "dataResourceUid" - "classs" - "rowkey").keySet.toIndexedSeq - val metaXml: Elem = buildCoreMetaXml(fieldsSeq, List("Multimedia")) - //add the XML - zop.write("""""".getBytes) - zop.write("\n".getBytes) - zop.write(metaXml.mkString("\n").getBytes) - zop.flush - zop.closeEntry - } - - private def buildCoreMetaXml(fieldsSeq: immutable.IndexedSeq[String], extensions: List[String]) = { + private def buildMetaXml(fieldsSeq: immutable.IndexedSeq[String], extensions: List[String]) = { val metaXml = @@ -439,15 +426,16 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { + case _ => }} metaXml } - def addMetaWithMultimedia(zop: ZipOutputStream) = { + def addMeta(zop: ZipOutputStream, extensions: List[String]) = { zop.putNextEntry(new ZipEntry("meta.xml")) val fieldsSeq = (fieldList - "dataResourceUid" - "classs" - "rowkey").keySet.toIndexedSeq - val metaXml: Elem = buildCoreMetaXml(fieldsSeq, List("Multimedia")) + val metaXml: Elem = buildMetaXml(fieldsSeq, extensions) //add the XML zop.write("""""".getBytes) zop.write("\n".getBytes) @@ -456,6 +444,7 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { zop.closeEntry } + /** * Retrieves an archive from the image service and then appends contents to * existing created archives. @@ -528,10 +517,10 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { //find the archive.... val archivePath = archivesPath + "/" + dataResourceUid + "/" + dataResourceUid + ".zip" - val archive = new File(archivesPath + "/" + dataResourceUid + "/" + dataResourceUid + ".zip") + val archive = new File(archivePath) if (archive.exists()) { - val backupArchive = new File(archivesPath + "/" + dataResourceUid + "/" + dataResourceUid + ".zip.backup") + val backupArchive = new File(archivePath + ".backup") if (backupArchive.exists()) { backupArchive.delete() } @@ -547,7 +536,7 @@ class DwCAExporter(fieldList: mutable.LinkedHashMap[String, String]) { addEML(zop, dataResourceUid) //add meta.xml - with multimedia extension - addMetaWithMultimedia(zop) + addMeta(zop, List("Multimedia")) //add images CSV zop.putNextEntry(new ZipEntry("image.csv")) From 83029e55672360ec9189b85484d818a74e1deb3f Mon Sep 17 00:00:00 2001 From: Reuben Roberts <68224960+ReubenRobertsNBN@users.noreply.github.com> Date: Mon, 31 Aug 2020 13:34:00 +0100 Subject: [PATCH 03/19] Fix to include last assertion in indexing The issue with the existing code is that is seems to skip the last entry in the JSON, because there is no '{' following this one. Possible fix in code. 
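To make the boundary condition concrete: the indexer walks the assertion JSON by looking for the next '{', so the final object has no following '{' and would be dropped unless the end index falls back to the string length. A self-contained sketch of that scan follows; it is illustrative only and not the SolrIndexDAO code itself (names and the sample JSON are made up).

```scala
// Standalone sketch of the "last entry skipped" problem: locate the start/end offsets
// of each {...} assertion object by scanning for the next '{'. Without the fallback,
// indexOf returns -1 after the last object and that object would be lost.
object LastEntrySketch {
  def assertionObjectBounds(jsonString: String): Seq[(Int, Int)] = {
    val starts = Iterator
      .iterate(jsonString.indexOf('{'))(i => jsonString.indexOf('{', i + 1))
      .takeWhile(_ >= 0)
      .toList
    starts.map { start =>
      val next = jsonString.indexOf('{', start + 1)
      val end  = if (next < 0) jsonString.length else next // fallback keeps the last entry
      (start, end)
    }
  }

  def main(args: Array[String]): Unit = {
    val json = """[{"code":1},{"code":2},{"code":3}]"""
    println(assertionObjectBounds(json)) // List((1,12), (12,23), (23,34))
  }
}
```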
---
 src/main/scala/au/org/ala/biocache/index/SolrIndexDAO.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/main/scala/au/org/ala/biocache/index/SolrIndexDAO.scala b/src/main/scala/au/org/ala/biocache/index/SolrIndexDAO.scala
index 187d36ab8..f60d69cf6 100644
--- a/src/main/scala/au/org/ala/biocache/index/SolrIndexDAO.scala
+++ b/src/main/scala/au/org/ala/biocache/index/SolrIndexDAO.scala
@@ -1299,8 +1299,11 @@ class SolrIndexDAO @Inject()(@Named("solr.home") solrHome: String,
     all.remove(AssertionCodes.PROCESSING_ERROR)
     all.remove(AssertionCodes.VERIFIED)
-    while (end > 2) {
+    while (end > 2 && i < jsonString.length()) {
       end = jsonString.indexOf('{', i + 1)
+      if (end < 0) { //last one, so there is no following '{'
+        end = jsonString.length()
+      }
       var codePos = jsonString.indexOf("\"code\":", i)
       var qaStatusPos = jsonString.indexOf("\"qaStatus\":", i)

From f10b3ca9067e3639708e1f2f7ec0c95b4147f2cc Mon Sep 17 00:00:00 2001
From: Patricia Koh
Date: Tue, 1 Sep 2020 12:18:43 +1000
Subject: [PATCH 04/19] Bump up to next snapshot version

---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index f81dc0a15..e197d5720 100644
--- a/pom.xml
+++ b/pom.xml
@@ -9,7 +9,7 @@
     au.org.ala
     biocache-store
-    2.6.0
+    2.6.1-SNAPSHOT
     https://biocache.ala.org.au
     GitHub

From 783e715a6b3c3f521b4dbc9964dc161c241744d8 Mon Sep 17 00:00:00 2001
From: Mahmoud
Date: Tue, 13 Oct 2020 14:03:44 +1100
Subject: [PATCH 05/19] Fix for AtlasOfLivingAustralia/la-pipelines#177

identifierBy is a typo of identifiedBy.
---
 src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
index 3b2ab510c..96cdb6626 100644
--- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
+++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
@@ -233,7 +233,11 @@ object DwCAExporter extends Tool {
     val resultMap = map.filter(_._2 != null).map({ (entry) =>
       entry._1 match {
         case "class" =>
+          // the class field in the DwCA falls back through class, classs and _class, taking the first non-empty value
           (entry._1, originalProperties.getOrElse(entry._1, if (!entry._2.isEmpty()) entry._2; else if (!map.getOrElse("classs", "").isEmpty) map.getOrElse("classs", ""); else map.getOrElse("_class", "")))
+        case "identifiedBy" =>
+          // fall back to the identifierBy field if identifiedBy is empty
+          (entry._1, originalProperties.getOrElse(entry._1, if (!entry._2.isEmpty()) entry._2; else map.getOrElse("identifierBy", "")))
         case "miscProperties" =>
           if (originalMiscProperties.isEmpty)
             ("dynamicProperties", entry._2)

From ee4f7ec1144681222c797111db241f88a5df5c72 Mon Sep 17 00:00:00 2001
From: Peter Ansell
Date: Wed, 14 Oct 2020 10:09:32 +1100
Subject: [PATCH 06/19] Upgrade parent pom

---
 pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pom.xml b/pom.xml
index e197d5720..d8a15f5f6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
     au.org.ala
     ala-parent-pom
-    11
+    14
     au.org.ala
@@ -348,7 +348,7 @@
     junit
     junit
-    4.12
+    4.13.1
     test

From f13ea1e5345da4dce086b0c15edc4db6d1bd2a97 Mon Sep 17 00:00:00 2001
From: Mahmoud
Date: Tue, 20 Oct 2020 10:59:02 +1100
Subject: [PATCH 07/19] Adding Easting, Northing and Zone

The related issue is: atlasoflivingaustralia/la-pipelines#164
---
 src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 5 ++++-
 1 file changed,
4 insertions(+), 1 deletion(-) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index 96cdb6626..4a7812b4f 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -63,6 +63,7 @@ object DwCAExporter extends Tool { "decimalLongitude" -> "http://rs.tdwg.org/dwc/terms/decimalLongitude", "disposition" -> "http://rs.tdwg.org/dwc/terms/disposition", "dynamicProperties" -> "http://rs.tdwg.org/dwc/terms/dynamicProperties", + "easting" -> "http://rs.ala.org.au/terms/1.0/easting", "endDayOfYear" -> "http://rs.tdwg.org/dwc/terms/endDayOfYear", "establishmentMeans" -> "http://rs.tdwg.org/dwc/terms/establishmentMeans", "eventAttributes" -> "http://rs.tdwg.org/dwc/terms/eventAttributes", @@ -136,6 +137,7 @@ object DwCAExporter extends Tool { "namePublishedInYear" -> "http://rs.tdwg.org/dwc/terms/namePublishedInYear", "nomenclaturalCode" -> "http://rs.tdwg.org/dwc/terms/nomenclaturalCode", "nomenclaturalStatus" -> "http://rs.tdwg.org/dwc/terms/nomenclaturalStatus", + "northing" -> "http://rs.ala.org.au/terms/1.0/northing", "occurrenceAttributes" -> "http://rs.tdwg.org/dwc/terms/occurrenceAttributes", "occurrenceDetails" -> "http://rs.tdwg.org/dwc/terms/occurrenceDetails", "occurrenceID" -> "http://rs.tdwg.org/dwc/terms/occurrenceID", @@ -194,7 +196,8 @@ object DwCAExporter extends Tool { "verbatimTaxonRank" -> "http://rs.tdwg.org/dwc/terms/verbatimTaxonRank", "vernacularName" -> "http://rs.tdwg.org/dwc/terms/vernacularName", "waterBody" -> "http://rs.tdwg.org/dwc/terms/waterBody", - "year" -> "http://rs.tdwg.org/dwc/terms/year" + "year" -> "http://rs.tdwg.org/dwc/terms/year", + "zone" -> "http://rs.ala.org.au/terms/1.0/zone" ) var resourceUid = "" From 2ec882e67e24c98837560ba6c7498b5d9f1348ec Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Tue, 20 Oct 2020 13:55:37 +1100 Subject: [PATCH 08/19] Missed Dublin Core fields are added AtlasOfLivingAustralia/la-pipelines#165 --- src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index 4a7812b4f..85e023cb6 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -166,6 +166,7 @@ object DwCAExporter extends Tool { "reproductiveCondition" -> "http://rs.tdwg.org/dwc/terms/reproductiveCondition", "resourceID" -> "http://rs.tdwg.org/dwc/terms/resourceID", "resourceRelationshipID" -> "http://rs.tdwg.org/dwc/terms/resourceRelationshipID", + "rights" -> "http://purl.org/dc/terms/rights", "rightsHolder" -> "http://purl.org/dc/terms/rightsHolder", "samplingEffort" -> "http://rs.tdwg.org/dwc/terms/samplingEffort", "samplingProtocol" -> "http://rs.tdwg.org/dwc/terms/samplingProtocol", @@ -173,6 +174,7 @@ object DwCAExporter extends Tool { "scientificNameAuthorship" -> "http://rs.tdwg.org/dwc/terms/scientificNameAuthorship", "scientificNameID" -> "http://rs.tdwg.org/dwc/terms/scientificNameID", "sex" -> "http://rs.tdwg.org/dwc/terms/sex", + "source" -> "http://purl.org/dc/terms/source", "specificEpithet" -> "http://rs.tdwg.org/dwc/terms/specificEpithet", "startDayOfYear" -> "http://rs.tdwg.org/dwc/terms/startDayOfYear", "stateProvince" -> "http://rs.tdwg.org/dwc/terms/stateProvince", From 5817daff9b87833e58c42f96396e2a4d9137833b Mon Sep 
17 00:00:00 2001 From: Mahmoud Date: Tue, 20 Oct 2020 16:31:50 +1100 Subject: [PATCH 09/19] ABCD Identifiers added Fix for AtlasOfLivingAustralia/la-pipelines#173 --- src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index 85e023cb6..f321421b1 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -33,6 +33,9 @@ object DwCAExporter extends Tool { var dwcFieldsMap = mutable.LinkedHashMap( "rowkey" -> "", + "abcdIdentificationQualifier" -> "http://rs.tdwg.org/abcd/terms/abcdIdentificationQualifier", + "abcdIdentificationQualifierInsertionPoint" -> "http://rs.tdwg.org/abcd/terms/abcdIdentificationQualifierInsertionPoint", + "abcdTypeStatus" -> "http://rs.tdwg.org/abcd/terms/abcdTypeStatus", "acceptedNameUsage" -> "http://rs.tdwg.org/dwc/terms/acceptedNameUsage", "acceptedNameUsageID" -> "http://rs.tdwg.org/dwc/terms/acceptedNameUsageID", "accessRights" -> "http://purl.org/dc/terms/accessRights", @@ -186,6 +189,7 @@ object DwCAExporter extends Tool { "taxonRemarks" -> "http://rs.tdwg.org/dwc/terms/taxonRemarks", "type" -> "http://purl.org/dc/terms/type", "typeStatus" -> "http://rs.tdwg.org/dwc/terms/typeStatus", + "typifiedName" -> "http://rs.tdwg.org/abcd/terms/typifiedName", "verbatimCoordinates" -> "http://rs.tdwg.org/dwc/terms/verbatimCoordinates", "verbatimCoordinateSystem" -> "http://rs.tdwg.org/dwc/terms/verbatimCoordinateSystem", "verbatimDepth" -> "http://rs.tdwg.org/dwc/terms/verbatimDepth", From c8e489610dcf03680a0438dbd91f4f47f845e9ad Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Tue, 20 Oct 2020 16:36:59 +1100 Subject: [PATCH 10/19] HISPID identifierRole is added fix for AtlasOfLivingAustralia/la-pipelines#167 --- src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index f321421b1..a36cc5e12 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -98,6 +98,7 @@ object DwCAExporter extends Tool { "identificationRemarks" -> "http://rs.tdwg.org/dwc/terms/identificationRemarks", "identificationVerificationStatus" -> "http://rs.tdwg.org/dwc/terms/identificationVerificationStatus", "identifiedBy" -> "http://rs.tdwg.org/dwc/terms/identifiedBy", + "identifierRole" -> "http://hiscom.chah.org.au/hispid/terms/identifierRole", "individualCount" -> "http://rs.tdwg.org/dwc/terms/individualCount", "individualID" -> "http://rs.tdwg.org/dwc/terms/individualID", "informationWithheld" -> "http://rs.tdwg.org/dwc/terms/informationWithheld", From 72d9008a778054eede2fa8083673d70833b7fdae Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Tue, 20 Oct 2020 16:38:00 +1100 Subject: [PATCH 11/19] userId added as recordedById fix for AtlasOfLivingAustralia/la-pipelines#166 --- src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index a36cc5e12..346df146f 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ 
-191,6 +191,7 @@ object DwCAExporter extends Tool { "type" -> "http://purl.org/dc/terms/type", "typeStatus" -> "http://rs.tdwg.org/dwc/terms/typeStatus", "typifiedName" -> "http://rs.tdwg.org/abcd/terms/typifiedName", + "userId" -> "http://rs.gbif.org/terms/1.0/recordedByID", "verbatimCoordinates" -> "http://rs.tdwg.org/dwc/terms/verbatimCoordinates", "verbatimCoordinateSystem" -> "http://rs.tdwg.org/dwc/terms/verbatimCoordinateSystem", "verbatimDepth" -> "http://rs.tdwg.org/dwc/terms/verbatimDepth", From 1f68884f759a09586a90f05e5ee8e1e54a7d392c Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Tue, 27 Oct 2020 14:36:36 +1100 Subject: [PATCH 12/19] Add photographer as a field in Dwca Fix for atlasoflivingaustralia/la-pipelines#171 --- src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index 346df146f..9cf76b494 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -160,6 +160,7 @@ object DwCAExporter extends Tool { "pointRadiusSpatialFit" -> "http://rs.tdwg.org/dwc/terms/pointRadiusSpatialFit", "preparations" -> "http://rs.tdwg.org/dwc/terms/preparations", "previousIdentifications" -> "http://rs.tdwg.org/dwc/terms/previousIdentifications", + "photographer" -> "http://rs.ala.org.au/terms/1.0/photographer", "recordedBy" -> "http://rs.tdwg.org/dwc/terms/recordedBy", "recordNumber" -> "http://rs.tdwg.org/dwc/terms/recordNumber", "relatedResourceID" -> "http://rs.tdwg.org/dwc/terms/relatedResourceID", From 1dc1ae50a523032b58996c22e383b3190e97a274 Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Tue, 27 Oct 2020 14:38:01 +1100 Subject: [PATCH 13/19] Added subfamily, superfamily, species, and subspecies fields to Dwca Fix for atlasoflivingaustralia/la-pipelines#170 --- src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala index 9cf76b494..15d61c5a5 100644 --- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala +++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala @@ -180,10 +180,14 @@ object DwCAExporter extends Tool { "scientificNameID" -> "http://rs.tdwg.org/dwc/terms/scientificNameID", "sex" -> "http://rs.tdwg.org/dwc/terms/sex", "source" -> "http://purl.org/dc/terms/source", + "species" -> "http://rs.ala.org.au/terms/1.0/species", "specificEpithet" -> "http://rs.tdwg.org/dwc/terms/specificEpithet", "startDayOfYear" -> "http://rs.tdwg.org/dwc/terms/startDayOfYear", "stateProvince" -> "http://rs.tdwg.org/dwc/terms/stateProvince", "subgenus" -> "http://rs.tdwg.org/dwc/terms/subgenus", + "subfamily" -> "http://rs.ala.org.au/terms/1.0/subfamily", + "subspecies" -> "http://rs.ala.org.au/terms/1.0/subspecies", + "superfamily" -> "http://rs.ala.org.au/terms/1.0/superfamily", "taxonConceptID" -> "http://rs.tdwg.org/dwc/terms/taxonConceptID", "taxonID" -> "http://rs.tdwg.org/dwc/terms/taxonID", "taxonomicStatus" -> "http://rs.tdwg.org/dwc/terms/taxonomicStatus", From 731fe1c63a945c8de720f6a7ab8dfb4f2b4beec0 Mon Sep 17 00:00:00 2001 From: Mahmoud Date: Tue, 27 Oct 2020 14:56:54 +1100 Subject: [PATCH 14/19] secondaryCollectors added to Dwca Fix for atlasoflivingaustralia/la-pipelines#169 --- 
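For context on where these term mappings end up: each key in dwcFieldsMap becomes a column of occurrence.csv, and its URI is written into the archive's meta.xml as a field element at the matching column index. A rough sketch of that rendering, using a Scala XML literal as the exporter does; the helper name and index value are illustrative, not the exporter's actual code.

```scala
// Hedged sketch of how a dwcFieldsMap entry is rendered into a DwC-A meta.xml
// <field> element: the map key identifies a CSV column and its URI is recorded
// against that column's index. The index used here is made up for the example.
import scala.xml.Elem

object MetaFieldSketch {
  def fieldElem(index: Int, termUri: String): Elem =
    <field index={index.toString} term={termUri}/>

  def main(args: Array[String]): Unit = {
    val term = "photographer" -> "http://rs.ala.org.au/terms/1.0/photographer"
    // prints something like: <field index="42" term="http://rs.ala.org.au/terms/1.0/photographer"/>
    println(fieldElem(42, term._2))
  }
}
```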
 src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
index 15d61c5a5..c8451ff8e 100644
--- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
+++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
@@ -178,6 +178,7 @@ object DwCAExporter extends Tool {
     "scientificName" -> "http://rs.tdwg.org/dwc/terms/scientificName",
     "scientificNameAuthorship" -> "http://rs.tdwg.org/dwc/terms/scientificNameAuthorship",
     "scientificNameID" -> "http://rs.tdwg.org/dwc/terms/scientificNameID",
+    "secondaryCollectors" -> "http://hiscom.chah.org.au/hispid/terms/secondaryCollectors",
     "sex" -> "http://rs.tdwg.org/dwc/terms/sex",
     "source" -> "http://purl.org/dc/terms/source",
     "species" -> "http://rs.ala.org.au/terms/1.0/species",

From d3160e8ed20cd9aa405f386fd1fd231de1793780 Mon Sep 17 00:00:00 2001
From: Mahmoud
Date: Thu, 29 Oct 2020 14:15:09 +1100
Subject: [PATCH 15/19] AVH GGBN fields added

Fix for AtlasOfLivingAustralia/la-pipelines#161
---
 src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
index c8451ff8e..70caf239c 100644
--- a/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
+++ b/src/main/scala/au/org/ala/biocache/export/DwCAExporter.scala
@@ -111,6 +111,9 @@ object DwCAExporter extends Tool {
     "language" -> "http://purl.org/dc/terms/language",
     "license" -> "http://purl.org/dc/terms/license",
     "lifeStage" -> "http://rs.tdwg.org/dwc/terms/lifeStage",
+    "loanDate" -> "http://data.ggbn.org/schemas/ggbn/terms/loanDate",
+    "loanDestination" -> "http://data.ggbn.org/schemas/ggbn/terms/loanDestination",
+    "loanIdentifier" -> "http://data.ggbn.org/schemas/ggbn/terms/loanIdentifier",
     "locality" -> "http://rs.tdwg.org/dwc/terms/locality",
     "locationAccordingTo" -> "http://rs.tdwg.org/dwc/terms/locationAccordingTo",
     "locationAttributes" -> "http://rs.tdwg.org/dwc/terms/locationAttributes",

From 75c5f7baa7b9e2e8c04ceba528102ada0db0cd13 Mon Sep 17 00:00:00 2001
From: alexhuang091
Date: Mon, 7 Dec 2020 17:39:08 +1100
Subject: [PATCH 16/19] Fixed an issue with assertion_user_id

Previously, assertion_user_id was only generated when a user had an outstanding
assertion (status = 50005 or 50001). This fix changes the behaviour so that as long
as a user makes an assertion (whether it has been verified or is still outstanding),
their id is included in the assertion_user_id field of the corresponding occurrence record.
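A simplified model of that behaviour change is sketched below. QualityAssertion is reduced to the three fields that matter here, and the status codes follow the values quoted above and in the test code later in this series; the real getCombinedUserStatus logic is richer than this.

```scala
// Simplified model of the change: previously only users with an outstanding assertion
// (qaStatus 50005 or 50001) contributed to assertion_user_id; now every original user
// assertion counts. An assertion with relatedUuid == null is an original assertion
// rather than a verification of someone else's assertion.
object AssertionUserIdSketch {
  final case class UserAssertion(userId: String, qaStatus: Int, relatedUuid: String = null)

  val QA_UNCONFIRMED = 50005 // outstanding, per the commit message above
  val QA_OPEN_ISSUE  = 50001
  val QA_VERIFIED    = 50002 // per the test code later in this series

  // Old behaviour: only users with an outstanding assertion were collected.
  def oldAssertionUserIds(assertions: Seq[UserAssertion]): Seq[String] =
    assertions
      .filter(a => a.qaStatus == QA_UNCONFIRMED || a.qaStatus == QA_OPEN_ISSUE)
      .map(_.userId).distinct

  // New behaviour: every original (non-verification) user assertion counts.
  def newAssertionUserIds(assertions: Seq[UserAssertion]): Seq[String] =
    assertions.filter(_.relatedUuid == null).map(_.userId).distinct

  def main(args: Array[String]): Unit = {
    val assertions = Seq(
      UserAssertion("user1", QA_UNCONFIRMED),
      UserAssertion("user2", QA_VERIFIED) // assertion already verified/closed by an admin
    )
    println(oldAssertionUserIds(assertions)) // List(user1)
    println(newAssertionUserIds(assertions)) // List(user1, user2)
  }
}
```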
--- .../ala/biocache/dao/OccurrenceDAOImpl.scala | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/src/main/scala/au/org/ala/biocache/dao/OccurrenceDAOImpl.scala b/src/main/scala/au/org/ala/biocache/dao/OccurrenceDAOImpl.scala index a491947f1..417ecc9fa 100644 --- a/src/main/scala/au/org/ala/biocache/dao/OccurrenceDAOImpl.scala +++ b/src/main/scala/au/org/ala/biocache/dao/OccurrenceDAOImpl.scala @@ -745,7 +745,7 @@ class OccurrenceDAOImpl extends OccurrenceDAO { } // Updating system assertion, pass in false - val (userAssertionStatus, trueUserAssertions) = getCombinedUserStatus(false, userAssertions) + val (userAssertionStatus, trueUserAssertions, originalAssertions) = getCombinedUserStatus(false, userAssertions) val verified = if (userAssertionStatus == AssertionStatus.QA_VERIFIED || userAssertionStatus == AssertionStatus.QA_CORRECTED) true else false @@ -910,7 +910,7 @@ class OccurrenceDAOImpl extends OccurrenceDAO { persistenceManager.put(rowKey, qaEntityName, qaMap, true, false) val systemAssertions = getSystemAssertions(rowKey) val userAssertions = getUserAssertions(rowKey) - updateAssertionStatus(rowKey, qualityAssertion, systemAssertions, userAssertions :+ qualityAssertion) + updateAssertionStatus(rowKey, qualityAssertion, systemAssertions, userAssertions) //set the last user assertion date persistenceManager.put(rowKey, entityName, FullRecordMapper.lastUserAssertionDateColumn, qualityAssertion.created, false, false) @@ -1008,8 +1008,12 @@ class OccurrenceDAOImpl extends OccurrenceDAO { * If Collection Admin verifies the record, currentAssertion will have * code: 50000 (AssertionCodes.VERIFIED.code), * qaStatus: AssertionStatus.QA_OPEN_ISSUE, AssertionStatus.QA_VERIFIED, AssertionStatus:QA_CORRECTED + * + * Alex updated on 07/12/2020, combinedUserAssertions contains all assertions in 50005 or 50001 state (outstanding assertions) + * originalAssertions contains all assertions users (not admin) made. Keep the original interface just to make sure new change + * doesn't break anything */ - private def getCombinedUserStatus(bVerified: Boolean, userAssertions: List[QualityAssertion]): (Int, ArrayBuffer[QualityAssertion]) = { + private def getCombinedUserStatus(bVerified: Boolean, userAssertions: List[QualityAssertion]): (Int, ArrayBuffer[QualityAssertion], ArrayBuffer[QualityAssertion]) = { // Filter off only verified records val verifiedAssertions = userAssertions.filter(qa => qa.code == AssertionCodes.VERIFIED.code) @@ -1017,6 +1021,12 @@ class OccurrenceDAOImpl extends OccurrenceDAO { // Filter off only user assertions type val assertions = userAssertions.filter(qa => qa.code != AssertionCodes.VERIFIED.code && AssertionStatus.isUserAssertionType(qa.qaStatus)) + var originalAssertions = new ArrayBuffer[QualityAssertion]() + // qa.relatedUuid == null means it's not a verification because a verification must have an associated assertion thus relatedUuid not null + assertions.filter(qa => qa.relatedUuid == null).foreach { + originalAssertions.append(_) + } + // Sort the verified list according to relatedUuid and order of reference rowKey. 
// RowKey for verified records consist of rowKey|userId|code|recNum where recNum increments everytime a verified record is added val sortedList = verifiedAssertions.sortWith(QualityAssertion.compareByReferenceRowKeyDesc).sortBy(_.relatedUuid) @@ -1072,7 +1082,7 @@ class OccurrenceDAOImpl extends OccurrenceDAO { logger.debug("Overall assertion Status: " + userAssertionStatus) - (userAssertionStatus, combinedUserAssertions) + (userAssertionStatus, combinedUserAssertions, originalAssertions) } /** @@ -1083,7 +1093,7 @@ class OccurrenceDAOImpl extends OccurrenceDAO { logger.debug("Updating the assertion status for : " + rowKey) val bVerified = AssertionCodes.isVerified(assertion) - val (userAssertionStatus, remainingAssertions) = getCombinedUserStatus(bVerified, userAssertions) + val (userAssertionStatus, remainingAssertions, originalAssertions) = getCombinedUserStatus(bVerified, userAssertions) // default to the assertion which is to be evaluated var actualAssertion = assertion @@ -1152,7 +1162,8 @@ class OccurrenceDAOImpl extends OccurrenceDAO { logger.debug("Final " + listErrorCodes) //update the list - persistenceManager.put(rowKey, entityName, FullRecordMapper.userQualityAssertionColumn, Json.toJSON(remainingAssertions.toList), false, false) + // use all user assertions (no matter if it's already verified) to fill userQualityAssertionColumn so that assertion_user_id can be extracted correctly when reindexing + persistenceManager.put(rowKey, entityName, FullRecordMapper.userQualityAssertionColumn, Json.toJSON(originalAssertions.toList), false, false) persistenceManager.putList(rowKey, entityName, FullRecordMapper.markAsQualityAssertion(phase), listErrorCodes.toList, classOf[Int], false, true, false) //set the overall decision if necessary From 65af92d1eb143b01634667efcf166fbe4f66edc1 Mon Sep 17 00:00:00 2001 From: alexhuang091 Date: Wed, 9 Dec 2020 22:46:56 +1100 Subject: [PATCH 17/19] added ut code --- .../org/ala/biocache/AssertionCodeTest.scala | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/src/test/scala/au/org/ala/biocache/AssertionCodeTest.scala b/src/test/scala/au/org/ala/biocache/AssertionCodeTest.scala index 806abb324..ee5e6dced 100644 --- a/src/test/scala/au/org/ala/biocache/AssertionCodeTest.scala +++ b/src/test/scala/au/org/ala/biocache/AssertionCodeTest.scala @@ -5,6 +5,7 @@ import org.junit.runner.RunWith import au.org.ala.biocache.model.{FullRecord, QualityAssertion, Versions} import au.org.ala.biocache.load.FullRecordMapper import au.org.ala.biocache.processor.RecordProcessor +import au.org.ala.biocache.util.Json import au.org.ala.biocache.vocab.AssertionCodes import au.org.ala.biocache.vocab.AssertionStatus @@ -13,6 +14,7 @@ class AssertionCodeTest extends ConfigFunSuite { val rowKey = "test1" val rowKey2 = "test2" val rowKey3 = "test3" + val rowKey4 = "test4" val uuid = "uuid" val occurrenceDAO = Config.occurrenceDAO @@ -304,6 +306,60 @@ class AssertionCodeTest extends ConfigFunSuite { } } + test("user assertion list") { + val processed = new FullRecord + processed.rowKey = uuid + occurrenceDAO.updateOccurrence(rowKey4, processed, None, Versions.PROCESSED) + + // first qa added + val qa = QualityAssertion.apply(AssertionCodes.GEOSPATIAL_ISSUE, AssertionStatus.QA_UNCONFIRMED); + qa.userId = "hua091@csiro.au" + occurrenceDAO.addUserAssertion(rowKey4, qa) + + var assertionList = Json.toListWithGeneric(Config.persistenceManager.get(rowKey4, "occ", FullRecordMapper.userQualityAssertionColumn).getOrElse(""), 
classOf[QualityAssertion]).asInstanceOf[List[QualityAssertion]] + expectResult(true) { + occurrenceDAO.getUserAssertions(rowKey4).size == 1 && assertionList.size == 1 + } + + // add second qa + val qa2 = QualityAssertion.apply(AssertionCodes.COORDINATE_HABITAT_MISMATCH, AssertionStatus.QA_UNCONFIRMED) + qa2.userId = "test@csiro.au" + occurrenceDAO.addUserAssertion(rowKey4, qa2) + + assertionList = Json.toListWithGeneric(Config.persistenceManager.get(rowKey4, "occ", FullRecordMapper.userQualityAssertionColumn).getOrElse(""), classOf[QualityAssertion]).asInstanceOf[List[QualityAssertion]] + expectResult(true) { + occurrenceDAO.getUserAssertions(rowKey4).size == 2 && assertionList.size == 2 + } + + // a 50001 verification associated with 1st qa + val qa50001 = QualityAssertion.apply(AssertionCodes.VERIFIED, AssertionStatus.QA_OPEN_ISSUE); + qa50001.userId = "admin@csiro.au" + qa50001.relatedUuid = qa.uuid + occurrenceDAO.addUserAssertion(rowKey4, qa50001) + + assertionList = Json.toListWithGeneric(Config.persistenceManager.get(rowKey4, "occ", FullRecordMapper.userQualityAssertionColumn).getOrElse(""), classOf[QualityAssertion]).asInstanceOf[List[QualityAssertion]] + expectResult(true) { + occurrenceDAO.getUserAssertions(rowKey4).size == 3 && assertionList.size == 2 + } + + occurrenceDAO.deleteUserAssertion(rowKey4, qa50001.uuid) + assertionList = Json.toListWithGeneric(Config.persistenceManager.get(rowKey4, "occ", FullRecordMapper.userQualityAssertionColumn).getOrElse(""), classOf[QualityAssertion]).asInstanceOf[List[QualityAssertion]] + expectResult(true) { + occurrenceDAO.getUserAssertions(rowKey4).size == 2 && assertionList.size == 2 + } + + // a 50002 verification associated with qa2 + val qa50002 = QualityAssertion.apply(AssertionCodes.VERIFIED, AssertionStatus.QA_VERIFIED); + qa50002.userId = "admin1@csiro.au" + qa50002.relatedUuid = qa2.uuid + occurrenceDAO.addUserAssertion(rowKey4, qa50002) + + assertionList = Json.toListWithGeneric(Config.persistenceManager.get(rowKey4, "occ", FullRecordMapper.userQualityAssertionColumn).getOrElse(""), classOf[QualityAssertion]).asInstanceOf[List[QualityAssertion]] + expectResult(true) { + occurrenceDAO.getUserAssertions(rowKey4).size == 3 && assertionList.size == 2 + } + } + test("Test add adhoc System assertion") { import AssertionStatus._ From d3e11d74dc7e6de437d015c5ac0d9e9403a01820 Mon Sep 17 00:00:00 2001 From: "alex.huang" Date: Tue, 9 Mar 2021 15:46:53 +1100 Subject: [PATCH 18/19] Hotfix/data quality (#410) * AtlasOfLivingAustralia/DataQuality#187 Add user duplicate record assertion * AtlasOfLivingAustralia/DataQuality#187 Add related reason text field Co-authored-by: Simon Bear --- conf/cassandra3_case_sensitive_schema.txt | 2 ++ conf/cassandra3_schema.txt | 4 ++- .../ala/biocache/model/QualityAssertion.scala | 28 ++++++++++--------- .../ala/biocache/vocab/AssertionCodes.scala | 3 +- .../au/org/ala/biocache/IndexingTest.scala | 2 +- 5 files changed, 23 insertions(+), 16 deletions(-) diff --git a/conf/cassandra3_case_sensitive_schema.txt b/conf/cassandra3_case_sensitive_schema.txt index 547820070..d407510e3 100644 --- a/conf/cassandra3_case_sensitive_schema.txt +++ b/conf/cassandra3_case_sensitive_schema.txt @@ -114,6 +114,8 @@ CREATE TABLE occ.qa ( "userDisplayName" text, "userEmail" text, uuid text, + relatedrecordid text, + relatedrecordreason text, PRIMARY KEY (rowkey, "userId", code) ) WITH CLUSTERING ORDER BY ("userId" ASC, code ASC) AND bloom_filter_fp_chance = 0.01 diff --git a/conf/cassandra3_schema.txt b/conf/cassandra3_schema.txt 
index 9eb92e14f..78f42daf0 100644 --- a/conf/cassandra3_schema.txt +++ b/conf/cassandra3_schema.txt @@ -105,7 +105,9 @@ CREATE TABLE qa ( userentityuid text, userid text, userrole text, - value text + value text, + relatedrecordid text, + relatedrecordreason text, ) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' diff --git a/src/main/scala/au/org/ala/biocache/model/QualityAssertion.scala b/src/main/scala/au/org/ala/biocache/model/QualityAssertion.scala index 31eebe098..222bdb1dd 100644 --- a/src/main/scala/au/org/ala/biocache/model/QualityAssertion.scala +++ b/src/main/scala/au/org/ala/biocache/model/QualityAssertion.scala @@ -23,48 +23,48 @@ object QualityAssertion { if(errorCode.isEmpty){ throw new Exception("Unrecognised code: " + code) } - new QualityAssertion(uuid,null,errorCode.get.name,errorCode.get.code,null,null,2,null,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null,errorCode.get.name,errorCode.get.code,null,null,2,null,null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode) = { val uuid = UUID.randomUUID.toString - new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,0,null,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,0,null,null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode,problemAsserted:Boolean) = { val uuid = UUID.randomUUID.toString - new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,if(problemAsserted) 0 else 1,null,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,if(problemAsserted) 0 else 1,null,null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode,problemAsserted:Boolean,comment:String) = { val uuid = UUID.randomUUID.toString - new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,if(problemAsserted) 0 else 1,comment,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,if(problemAsserted) 0 else 1,comment,null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode,comment:String) = { val uuid = UUID.randomUUID.toString - new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,0,comment,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null,errorCode.name,errorCode.code,null,null,0,comment,null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode, qaStatus:Int, comment:String)={ val uuid = UUID.randomUUID.toString - new QualityAssertion(uuid,null, errorCode.name, errorCode.code,null,null, qaStatus, comment, null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null, errorCode.name, errorCode.code,null,null, qaStatus, comment, null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode, qaStatus:Int)={ val uuid = UUID.randomUUID.toString - new QualityAssertion(uuid,null, errorCode.name, errorCode.code,null,null, qaStatus, null, null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null, errorCode.name, errorCode.code,null,null, qaStatus, null, null,null,null,null,null,null,null,new Date(),null,null) } def apply(assertionCode:Int,problemAsserted:Boolean,comment:String) = { val uuid = UUID.randomUUID.toString - new 
QualityAssertion(uuid,null,null,assertionCode,null,null,if(problemAsserted) 0 else 1,comment,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null,null,assertionCode,null,null,if(problemAsserted) 0 else 1,comment,null,null,null,null,null,null,null,new Date(),null,null) } def apply(assertionCode:Int, qaStatus:Int, comment:String) ={ val uuid = UUID.randomUUID().toString - new QualityAssertion(uuid,null, null, assertionCode,null,null,qaStatus,comment,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null, null, assertionCode,null,null,qaStatus,comment,null,null,null,null,null,null,null,new Date(),null,null) } def apply(assertionCode:Int, qaStatus:Int) ={ val uuid = UUID.randomUUID().toString - new QualityAssertion(uuid,null, null, assertionCode,null,null,qaStatus,null,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null, null, assertionCode,null,null,qaStatus,null,null,null,null,null,null,null,null,new Date(),null,null) } def apply(errorCode:ErrorCode, relatedUuid: String, qaStatus:Int) = { val uuid = UUID.randomUUID().toString - new QualityAssertion(uuid,null, errorCode.name, errorCode.code,null,relatedUuid,qaStatus,null,null,null,null,null,null,null,null,new Date()) + new QualityAssertion(uuid,null, errorCode.name, errorCode.code,null,relatedUuid,qaStatus,null,null,null,null,null,null,null,null,new Date(),null,null) } @@ -107,13 +107,15 @@ class QualityAssertion ( @BeanProperty var userRole:String, //null for system assertions, example - collection manager @BeanProperty var userEntityUid:String, //null for system assertions, example - co13 @BeanProperty var userEntityName:String, //null for system assertions, example - ANIC - @BeanProperty var created:String) + @BeanProperty var created:String, + @BeanProperty var relatedRecordId:String, + @BeanProperty var relatedRecordReason:String) extends Cloneable with Comparable[AnyRef] with POSO { // override def toString :String = s"name:$name, code:$code, value:$value, comment:$comment, qaStatus:$qaStatus, relatedUuid:$relatedUuid" override def toString :String = s"code:$code, qaStatus:$getQAStatusName, uuid:$uuid, relatedUuid:$relatedUuid, created:$created \n" - def this() = this(null,null,null,-1,false,null,2,null,null,null,null,null,null,null,null,null) + def this() = this(null,null,null,-1,false,null,2,null,null,null,null,null,null,null,null,null,null,null) override def clone : QualityAssertion = super.clone.asInstanceOf[QualityAssertion] override def equals(that: Any) = that match { case other: QualityAssertion => { diff --git a/src/main/scala/au/org/ala/biocache/vocab/AssertionCodes.scala b/src/main/scala/au/org/ala/biocache/vocab/AssertionCodes.scala index c97f4bfa3..07ebb9ace 100644 --- a/src/main/scala/au/org/ala/biocache/vocab/AssertionCodes.scala +++ b/src/main/scala/au/org/ala/biocache/vocab/AssertionCodes.scala @@ -98,6 +98,7 @@ object AssertionCodes { val UNRECOGNISED_OCCURRENCE_STATUS = ErrorCode("unrecognisedOccurrenceStatus", 20017, false, "Occurrence status not recognised", Error) val ASSUMED_PRESENT_OCCURRENCE_STATUS = ErrorCode("assumedPresentOccurrenceStatus", 20018, false, "Occurrence status assumed to be present", Warning) val USER_ASSERTION_OTHER = ErrorCode("userAssertionOther", 20019,false,"Other error", Error) + val USER_DUPLICATE_RECORD = ErrorCode("userDuplicateRecord",20020,false,"The occurrence appears to be a duplicate", Warning) //temporal issues val TEMPORAL_ISSUE = ErrorCode("temporalIssue",30000,false,"Temporal issue", Error) // 
general purpose option @@ -151,7 +152,7 @@ object AssertionCodes { val miscellaneousCodes = all.filter(errorCode => {errorCode.code>=20000 && errorCode.code<30000}) val temporalCodes = all.filter(errorCode => {errorCode.code>=30000 && errorCode.code<40000}) - val userAssertionCodes = Array(GEOSPATIAL_ISSUE,COORDINATE_HABITAT_MISMATCH,DETECTED_OUTLIER,TAXONOMIC_ISSUE,IDENTIFICATION_INCORRECT,TEMPORAL_ISSUE,USER_ASSERTION_OTHER) + val userAssertionCodes = Array(GEOSPATIAL_ISSUE,COORDINATE_HABITAT_MISMATCH,DETECTED_OUTLIER,TAXONOMIC_ISSUE,IDENTIFICATION_INCORRECT,TEMPORAL_ISSUE,USER_DUPLICATE_RECORD,USER_ASSERTION_OTHER) //the assertions that are NOT performed during the processing phase val offlineAssertionCodes = Array(INFERRED_DUPLICATE_RECORD, SPECIES_OUTSIDE_EXPERT_RANGE, DETECTED_OUTLIER) diff --git a/src/test/scala/au/org/ala/biocache/IndexingTest.scala b/src/test/scala/au/org/ala/biocache/IndexingTest.scala index fc950ac18..e0a3e6d40 100644 --- a/src/test/scala/au/org/ala/biocache/IndexingTest.scala +++ b/src/test/scala/au/org/ala/biocache/IndexingTest.scala @@ -77,7 +77,7 @@ class IndexingTest extends ConfigFunSuite { //previous tests may have added data logger.info("LSID count = " + lsidCount) expectResult(true){lsidCount >= 10000} - logger.info("LSID count = " + nameCount) + logger.info("Name count = " + nameCount) expectResult(true){nameCount >= 10000} if(merged.exists()) FileUtils.deleteDirectory(merged) From 85fdc4b0a277c24e95a15b16944014a1e2f459b4 Mon Sep 17 00:00:00 2001 From: alexhuang091 Date: Mon, 22 Mar 2021 08:34:21 +1100 Subject: [PATCH 19/19] to release 2.6.1 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d8a15f5f6..d0b97580e 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ au.org.ala biocache-store - 2.6.1-SNAPSHOT + 2.6.1 https://biocache.ala.org.au GitHub
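For reference, the new duplicate-record user assertion from PATCH 18 might be raised along the following lines. The row key, user id and reason text are placeholders, and the imports and calls mirror the test code earlier in the series rather than any exact call site in biocache-store.

```scala
// Hypothetical usage of the new USER_DUPLICATE_RECORD assertion code together with the
// two new QualityAssertion fields (relatedRecordId, relatedRecordReason). Values are
// placeholders; only the types and the addUserAssertion call follow the code shown in
// this patch series.
import au.org.ala.biocache.Config
import au.org.ala.biocache.model.QualityAssertion
import au.org.ala.biocache.vocab.{AssertionCodes, AssertionStatus}

object DuplicateAssertionSketch {
  def flagDuplicate(rowKey: String, duplicateOfUuid: String, reason: String, userId: String): Unit = {
    val qa = QualityAssertion(AssertionCodes.USER_DUPLICATE_RECORD, AssertionStatus.QA_UNCONFIRMED)
    qa.userId = userId
    qa.relatedRecordId = duplicateOfUuid   // the record this one appears to duplicate
    qa.relatedRecordReason = reason        // free-text justification shown to curators
    Config.occurrenceDAO.addUserAssertion(rowKey, qa)
  }

  def main(args: Array[String]): Unit =
    flagDuplicate("some-row-key", "some-other-occurrence-uuid",
      "same collector, date and coordinates", "user@example.org")
}
```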