Merge remote-tracking branch 'origin/develop'
# Conflicts:
#	build.gradle
Adam Collins committed Dec 15, 2023
2 parents 2a572a5 + 510adae commit f5efa18
Showing 23 changed files with 571 additions and 563 deletions.
4 changes: 2 additions & 2 deletions build.gradle
@@ -40,7 +40,7 @@ repositories {
}

group = 'au.org.ala'
-version = '3.2.2'
+version = '3.3.0'


boolean inplace = false
@@ -130,7 +130,7 @@ dependencies {

} else {

-implementation 'au.org.ala:ala-ws-spring-security:6.0.4'
+implementation 'au.org.ala:ala-ws-spring-security:6.2.0'
}

implementation 'org.codehaus.groovy:groovy-all:3.0.11'
2 changes: 0 additions & 2 deletions src/main/java/au/org/ala/biocache/dao/QidCacheDAOImpl.java
@@ -502,8 +502,6 @@ synchronized public void clear() {
synchronized (counterLock) {
cache.clear();
cacheSize = 0;
-dataQualityService.clearCache();
}

}
}
4 changes: 2 additions & 2 deletions src/main/java/au/org/ala/biocache/dao/SearchDAO.java
@@ -75,12 +75,12 @@ public interface SearchDAO {
*
* @param downloadParams
* @param out
-* @param uidStats
+* @param downloadStats
* @param parallelQueryExecutor The ExecutorService to manage parallel query executions
* @return
* @throws Exception
*/
-DownloadHeaders writeResultsFromIndexToStream(DownloadRequestDTO downloadParams, OutputStream out, ConcurrentMap<String, AtomicInteger> uidStats, DownloadDetailsDTO dd, boolean checkLimit, ExecutorService parallelQueryExecutor) throws Exception;
+DownloadHeaders writeResultsFromIndexToStream(DownloadRequestDTO downloadParams, OutputStream out, DownloadStats downloadStats, DownloadDetailsDTO dd, boolean checkLimit, ExecutorService parallelQueryExecutor) throws Exception;

/**
* Write coordinates out to the supplied stream.
25 changes: 16 additions & 9 deletions src/main/java/au/org/ala/biocache/dao/SearchDAOImpl.java
@@ -23,7 +23,7 @@
import au.org.ala.biocache.writer.CSVRecordWriter;
import au.org.ala.biocache.writer.RecordWriterError;
import au.org.ala.biocache.writer.TSVRecordWriter;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import au.org.ala.ws.security.profile.AlaUserProfile;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
@@ -42,6 +42,8 @@
import org.slf4j.MDC;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
+import org.springframework.cache.CacheManager;
//import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.context.support.AbstractMessageSource;
import org.springframework.stereotype.Component;
@@ -575,7 +577,7 @@ public void writeCoordinatesToStream(SpatialSearchRequestDTO searchParams, Outpu
@Override
public DownloadHeaders writeResultsFromIndexToStream(final DownloadRequestDTO downloadParams,
final OutputStream out,
-final ConcurrentMap<String, AtomicInteger> uidStats,
+final DownloadStats downloadStats,
final DownloadDetailsDTO dd,
boolean checkLimit,
ExecutorService nextExecutor) throws Exception {
@@ -600,7 +602,7 @@ public DownloadHeaders writeResultsFromIndexToStream(final DownloadRequestDTO do
// submit download to executor
if (nextExecutor != null) {
// TODO: remove when deprecated services are removed: /occurrences/download and /occurrences/download/batchFile
-Future future = nextExecutor.submit(prepareDownloadRunner(downloadParams, downloadHeaders, dd, uidStats, recordWriter));
+Future future = nextExecutor.submit(prepareDownloadRunner(downloadParams, downloadHeaders, dd, downloadStats, recordWriter));

// wait for download to finish
// Busy wait because we need to be able to respond to an interrupt on any callable
@@ -622,7 +624,7 @@ public DownloadHeaders writeResultsFromIndexToStream(final DownloadRequestDTO do
} while (waitAgain);
} else {
// This is already running in an executor
-prepareDownloadRunner(downloadParams, downloadHeaders, dd, uidStats, recordWriter).call();
+prepareDownloadRunner(downloadParams, downloadHeaders, dd, downloadStats, recordWriter).call();
}
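The busy wait noted above polls the download Future with a short timeout instead of blocking indefinitely, so the servicing thread stays responsive to interrupts and can cancel the task promptly. A minimal, self-contained sketch of that pattern (the class name, timeout, and simulated task below are illustrative, not from this commit):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class BusyWaitDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<?> future = pool.submit(() -> {
            Thread.sleep(3000); // stand-in for a long-running download
            return null;
        });

        boolean waitAgain = true;
        do {
            try {
                future.get(250, TimeUnit.MILLISECONDS); // short poll
                waitAgain = false;                      // task finished
            } catch (TimeoutException e) {
                // Still running; poll again on the next iteration.
            } catch (InterruptedException e) {
                future.cancel(true); // propagate the interrupt to the task
                waitAgain = false;
            }
        } while (waitAgain);
        pool.shutdown();
    }
}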


@@ -644,7 +646,7 @@ private RecordWriter createRecordWriter(DownloadRequestDTO downloadParams, Downl
}

private Callable prepareDownloadRunner(DownloadRequestDTO downloadParams, DownloadHeaders downloadHeaders,
-DownloadDetailsDTO dd, ConcurrentMap<String, AtomicInteger> uidStats,
+DownloadDetailsDTO dd, DownloadStats downloadStats,
RecordWriter recordWriter) throws QidMissingException {
queryFormatUtils.formatSearchQuery(downloadParams);

@@ -670,7 +672,7 @@ private Callable prepareDownloadRunner(DownloadRequestDTO downloadParams, Downlo
queries.add(solrQuery);
}

-ProcessDownload procDownload = new ProcessDownload(uidStats, downloadHeaders, recordWriter, dd,
+ProcessDownload procDownload = new ProcessDownload(downloadStats, downloadHeaders, recordWriter, dd,
checkDownloadLimits, downloadService.dowloadOfflineMaxSize,
listsService, layersService);

@@ -827,8 +829,8 @@ private void addPostProcessingFields(DownloadRequestDTO downloadParams, Download
}).collect(Collectors.toList()).toArray(new String[0]);
}

-// fields required for logger.ala
-requestFields(downloadHeaders, new String[]{DATA_PROVIDER_UID, INSTITUTION_UID, COLLECTION_UID, DATA_RESOURCE_UID});
+// fields required for logger.ala and doi minting
+requestFields(downloadHeaders, new String[]{DATA_PROVIDER_UID, INSTITUTION_UID, COLLECTION_UID, DATA_RESOURCE_UID, LICENSE});

// 'lft' and 'rgt' are mandatory when there are species list fields
if (downloadHeaders.speciesListIds.length > 0) {
@@ -1577,6 +1579,10 @@ private void logError(String description, String message) {
}


+@Inject
+CacheManager cacheManager;
+
+@Cacheable("legendCache")
public List<LegendItem> getLegend(SpatialSearchRequestDTO searchParams, String facetField, String[] cutpoints) throws Exception {
return getLegend(searchParams, facetField, cutpoints, false);
}
@@ -2067,7 +2073,8 @@ String getFacetValueDisplayName(String facet, String value) {
//1850-01-01T00:00:00Z
} else if (searchUtils.getAuthIndexFields().contains(tFacet)) {
//if the facet field is collector or assertion_user_id we need to perform the substitution
-return authService.getDisplayNameFor(value);
+Optional<AlaUserProfile> profile = authService.lookupAuthUser(value);
+return profile.isPresent() ? profile.get().getName() : value;
} else {
if (messageSource != null) {

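The legend lookup in this file is now served through Spring's cache abstraction: the new @Cacheable("legendCache") annotation memoises getLegend results, and the injected CacheManager gives programmatic access to the cache, for example to evict stale entries. A minimal sketch of the same pattern; the class names and the in-memory ConcurrentMapCacheManager below are illustrative assumptions, not code from this repository:

import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;

@Configuration
@EnableCaching
class CacheConfig {
    @Bean
    CacheManager cacheManager() {
        // Simple in-memory cache; production code might use Caffeine or Ehcache.
        return new ConcurrentMapCacheManager("legendCache");
    }
}

@Component
class LegendService {
    @Cacheable("legendCache") // result is cached per facetField argument
    public String getLegend(String facetField) {
        System.out.println("computing legend for " + facetField);
        return "legend:" + facetField;
    }
}

public class LegendCacheDemo {
    public static void main(String[] args) {
        try (AnnotationConfigApplicationContext ctx =
                     new AnnotationConfigApplicationContext(CacheConfig.class, LegendService.class)) {
            LegendService svc = ctx.getBean(LegendService.class);
            svc.getLegend("basisOfRecord"); // computed and stored
            svc.getLegend("basisOfRecord"); // served from legendCache, no recompute
            ctx.getBean(CacheManager.class).getCache("legendCache").clear(); // programmatic eviction
        }
    }
}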
29 changes: 29 additions & 0 deletions src/main/java/au/org/ala/biocache/dto/DownloadStats.java
@@ -0,0 +1,29 @@
package au.org.ala.biocache.dto;

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;

public class DownloadStats {

ConcurrentMap<String, AtomicInteger> uidStats = new ConcurrentHashMap<>();

Set<String> licences = new HashSet<>();

public DownloadStats() {
}

public ConcurrentMap<String, AtomicInteger> getUidStats() {
return uidStats;
}

public Set<String> getLicences() {
return licences;
}

public void addLicence(String licence) {
licences.add(licence);
}
}
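The new DTO bundles the per-UID record counters (previously threaded through the download code as a bare ConcurrentMap<String, AtomicInteger>) with the set of licences seen during a download, which backs the DOI-minting LICENSE field requested in SearchDAOImpl. A small usage sketch; the demo class, the incrementUid helper, and the UID and licence values are hypothetical, not part of this file:

import au.org.ala.biocache.dto.DownloadStats;

import java.util.concurrent.atomic.AtomicInteger;

public class DownloadStatsDemo {

    // Hypothetical helper: bump the record count for a data resource UID,
    // creating the counter atomically on first sight.
    static void incrementUid(DownloadStats stats, String uid) {
        stats.getUidStats()
                .computeIfAbsent(uid, k -> new AtomicInteger(0))
                .incrementAndGet();
    }

    public static void main(String[] args) {
        DownloadStats stats = new DownloadStats();

        // As each record is streamed out, tally its data resource and licence.
        incrementUid(stats, "dr376");
        incrementUid(stats, "dr376");
        stats.addLicence("CC-BY 4.0");

        System.out.println(stats.getUidStats()); // {dr376=2}
        System.out.println(stats.getLicences()); // [CC-BY 4.0]
    }
}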
2 changes: 1 addition & 1 deletion src/main/java/au/org/ala/biocache/dto/OccurrenceIndex.java
@@ -16,7 +16,6 @@

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnore;
-import io.swagger.annotations.ApiModelProperty;
import io.swagger.v3.oas.annotations.media.Schema;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateFormatUtils;
@@ -77,6 +76,7 @@ public class OccurrenceIndex {
final static public String INSTITUTION_UID = "institutionUid";
final static public String DATA_PROVIDER_UID = "dataProviderUid";
final static public String DATA_RESOURCE_UID = "dataResourceUid";
+final static public String LICENSE = "license";

final public static String LOCALITY = "locality";
final public static String BIOME = "biome";
98 changes: 83 additions & 15 deletions src/main/java/au/org/ala/biocache/service/AlaLayersService.java
@@ -21,13 +21,21 @@
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.converter.FormHttpMessageConverter;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.util.StreamUtils;
import org.springframework.web.client.RestOperations;
import org.springframework.web.client.RestTemplate;

import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.net.URLConnection;
@@ -38,6 +46,7 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.zip.ZipInputStream;

/**
* The ALA Spatial portal implementation for the layer service.
@@ -50,9 +59,11 @@ public class AlaLayersService implements LayersService {

private final static Logger logger = LoggerFactory.getLogger(AlaLayersService.class);

-private Map<String,String> idToNameMap = RestartDataService.get(this, "idToNameMap", new TypeReference<HashMap<String, String>>(){}, HashMap.class);
-private List<Map<String,Object>> layers = RestartDataService.get(this, "layers", new TypeReference<ArrayList<Map<String, Object>>>(){}, ArrayList.class);
-private Map<String,String> extraLayers = new HashMap<String,String>();
+private Map<String, String> idToNameMap = RestartDataService.get(this, "idToNameMap", new TypeReference<HashMap<String, String>>() {
+}, HashMap.class);
+private List<Map<String, Object>> layers = RestartDataService.get(this, "layers", new TypeReference<ArrayList<Map<String, Object>>>() {
+}, ArrayList.class);
+private Map<String, String> extraLayers = new HashMap<String, String>();

//NC 20131018: Allow cache to be disabled via config (enabled by default)
@Value("${caches.layers.enabled:true}")
@@ -67,9 +78,12 @@
@Value("${layers.service.url:https://spatial.ala.org.au/ws}")
protected String layersServiceUrl;

-protected Map<String, Integer> distributions = RestartDataService.get(this, "distributions", new TypeReference<HashMap<String, Integer>>(){}, HashMap.class);
-protected Map<String, Integer> checklists = RestartDataService.get(this, "checklists", new TypeReference<HashMap<String, Integer>>(){}, HashMap.class);
-protected Map<String, Integer> tracks = RestartDataService.get(this, "tracks", new TypeReference<HashMap<String, Integer>>(){}, HashMap.class);
+protected Map<String, Integer> distributions = RestartDataService.get(this, "distributions", new TypeReference<HashMap<String, Integer>>() {
+}, HashMap.class);
+protected Map<String, Integer> checklists = RestartDataService.get(this, "checklists", new TypeReference<HashMap<String, Integer>>() {
+}, HashMap.class);
+protected Map<String, Integer> tracks = RestartDataService.get(this, "tracks", new TypeReference<HashMap<String, Integer>>() {
+}, HashMap.class);

@Inject
private RestOperations restTemplate; // NB MappingJacksonHttpMessageConverter() injected by Spring
@@ -86,19 +100,22 @@ public Map<String, String> getLayerNameMap() {
}

@Scheduled(fixedDelay = 43200000)// schedule to run every 12 hours
-public void refreshCache(){
+public void refreshCache() {
init();
}

@PostConstruct
public void init() {
+// add FormHttpMessageConverter
+((RestTemplate) restTemplate).getMessageConverters().add(new FormHttpMessageConverter());

if (layers.size() > 0) {
//data exists, no need to wait
wait.countDown();
}

//initialise the cache based on the values at https://spatial.ala.org.au/ws/fields
-if(enabled){
+if (enabled) {
new Thread() {
@Override
public void run() {
@@ -134,6 +151,7 @@ public void run() {
wait.countDown();
}
}

@Override
public String getName(String code) {
try {
@@ -157,7 +175,7 @@ public String findAnalysisLayerName(String analysisLayer, String layersServiceUr
}

String found = null;
-if(StringUtils.isNotBlank(url)) {
+if (StringUtils.isNotBlank(url)) {
String intersectUrl = null;
try {
//get analysis layer display name
@@ -176,7 +194,7 @@ public String findAnalysisLayerName(String analysisLayer, String layersServiceUr
return found;
}

-public Integer getDistributionsCount(String lsid){
+public Integer getDistributionsCount(String lsid) {
try {
wait.await();
} catch (InterruptedException e) {
@@ -187,7 +205,7 @@ public Integer getDistributionsCount(String lsid){
return count != null ? count : 0;
}

-public Integer getChecklistsCount(String lsid){
+public Integer getChecklistsCount(String lsid) {
try {
wait.await();
} catch (InterruptedException e) {
@@ -198,7 +216,7 @@ public Integer getChecklistsCount(String lsid){
return count != null ? count : 0;
}

-public Integer getTracksCount(String lsid){
+public Integer getTracksCount(String lsid) {
try {
wait.await();
} catch (InterruptedException e) {
@@ -214,7 +232,7 @@ private Map initDistribution(String type) {

String url = layersServiceUrl + "/" + type;
try {
-if(org.apache.commons.lang.StringUtils.isNotBlank(layersServiceUrl)) {
+if (org.apache.commons.lang.StringUtils.isNotBlank(layersServiceUrl)) {
//get distributions
List json = restTemplate.getForObject(url, List.class);
if (json != null) {
@@ -246,11 +264,61 @@ public String getLayersServiceUrl() {
}

@Override
-public Reader sample(String[] analysisLayers, double[][] points, Object o) {
-// TODO: for on the fly intersection of layers not indexed
+public Reader sample(String url, String[] analysisLayers, double[][] points) {
+// These are batches of 1000, so it should not take long to finish. 1000ms delay between status checks
long sleepTime = 1000;

try {
HttpHeaders requestHeaders = new HttpHeaders();
requestHeaders.setContentType(MediaType.APPLICATION_FORM_URLENCODED);

HttpEntity<MultiValueMap<String, String>> request = new HttpEntity<>(sampleBody(analysisLayers, points), requestHeaders);
Map status = (Map) restTemplate.postForObject(url + "/intersect/batch", request, Map.class);

while (status.containsKey("statusUrl")) {
Thread.sleep(sleepTime);
status = (Map) restTemplate.getForObject((String) status.get("statusUrl"), Map.class);
}

if (status.containsKey("downloadUrl")) {
URLConnection connection = new URL((String) status.get("downloadUrl")).openConnection();
ZipInputStream zis = new ZipInputStream(connection.getInputStream());
zis.getNextEntry();
return new InputStreamReader(zis);
}
} catch (Exception e) {
logger.error("layer sampling failed: " + url + ", " + e.getMessage());
}
return null;
}

private MultiValueMap<String, String> sampleBody(String[] analysisLayers, double[][] points) {
MultiValueMap<String, String> response = new LinkedMultiValueMap<>();
StringBuilder sb = new StringBuilder();

//sb.append("fids=");
for (int i = 0; i < analysisLayers.length; i++) {
if (i > 0) {
sb.append(",");
}
sb.append(analysisLayers[i]);
}
response.add("fids", sb.toString());

//sb.append("&points=");
sb = new StringBuilder();
for (int i = 0; i < points.length; i++) {
if (i > 0) {
sb.append(",");
}
sb.append(points[i][1]).append(",").append(points[i][0]);
}
response.add("points", sb.toString());

//return sb.toString();
return response;
}

@Cacheable("spatialObject")
@Override
public SpatialObjectDTO getObject(String spatialObjectId) {
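The new sample() implementation drives the spatial portal's batch intersect protocol: it POSTs a form-encoded body (built by sampleBody) to {url}/intersect/batch, polls the returned statusUrl every second until the job completes, then streams the first entry of the zipped result from downloadUrl. A standalone sketch of just the body construction, intended to behave like sampleBody above; the demo class, field ids, and coordinates are made up, and it assumes each point is stored as [longitude, latitude]:

import java.util.StringJoiner;

public class SampleBodyDemo {

    // Builds the two form fields the batch intersect endpoint expects:
    //   fids   = comma-separated layer/field ids
    //   points = flattened pairs, latitude first (points[i][1] in the diff)
    static String[] buildBody(String[] fids, double[][] points) {
        String fidsParam = String.join(",", fids);
        StringJoiner pts = new StringJoiner(",");
        for (double[] p : points) {
            pts.add(Double.toString(p[1])); // latitude
            pts.add(Double.toString(p[0])); // longitude
        }
        return new String[]{fidsParam, pts.toString()};
    }

    public static void main(String[] args) {
        String[] body = buildBody(
                new String[]{"cl22", "el674"},
                new double[][]{{146.2, -34.9}, {149.1, -35.3}});
        System.out.println("fids=" + body[0]);   // fids=cl22,el674
        System.out.println("points=" + body[1]); // points=-34.9,146.2,-35.3,149.1
    }
}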
(diffs for the remaining 16 changed files not shown)
